package se.chalmers.dat255.sleepfighter.challenge.motionsnake;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Deque;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import se.chalmers.dat255.sleepfighter.utils.geometry.Dimension;
import se.chalmers.dat255.sleepfighter.utils.geometry.Direction;
import se.chalmers.dat255.sleepfighter.utils.geometry.Position;
public class SnakeModel {
/** Random Number Generator (RNG) */
private final Random rng;
/** The position of the current snake-food. */
private Position currFoodPos;
/** A list containing all snake positions. */
private final Deque<Position> snakePos;
/** A list containing all empty positions. */
private final List<Position> emptyPos;
/** The direction of the snake. */
private Direction direction;
/** The number of snake-food eaten. */
private int score;
/** The size of the board. */
private Dimension boardSize;
/** The size of a tile. */
private int tileSize;
/** Whether game is over or not. */
private boolean gameOver;
/**
* Returns the current food position.
*
* @return Returns the current food position.
*/
public Position getFoodPosition() {
return this.currFoodPos;
}
/**
* Checks if food exists on the board.
*
* @return True, if there is food.
*/
public boolean hasFood() {
return currFoodPos != null;
}
/**
* Returns an immutable view of the collection of the snake's positions.
*
* @return Collection of the snake's positions.
*/
public Collection<Position> getSnakePositions() {
return Collections.unmodifiableCollection(this.snakePos);
}
/**
* Returns an immutable view of the list of empty positions.
*
* @return List of empty positions.
*/
public List<Position> getEmptyPositions() {
return Collections.unmodifiableList(this.emptyPos);
}
/**
* Returns the size of the game board as a Dimension object.
*
* @return Size of the game board.
*/
public Dimension getBoardSize() {
return boardSize;
}
public boolean isGameOver() {
return gameOver;
}
/**
* Returns whether or not a position is the head of the snake. Since the
* snake always has a head, this method never throws a NullPointerException.
*
* @param pos The position to check.
* @return true if the position is the head of the snake, otherwise false.
*/
public boolean isPositionHead(Position pos) {
return this.snakePos.peekFirst().equals(pos);
}
/**
* Creates a new model of snake-game.
*
* @param size
* The size of the board.
* @param startDirection
* The direction the snake's head will initially point in.
* @param rng
* Random Number Generator.
*/
public SnakeModel(Dimension size, Direction startDirection, Random rng) {
// Init RNG.
this.rng = rng;
// Set start direction.
this.direction = startDirection;
// Set board size.
this.boardSize = size;
// Set tile size.
this.tileSize = SnakeConstants.getTileSize();
// Set game status.
this.gameOver = false;
// Blank out the whole gameboard.
this.emptyPos = new ArrayList<Position>(size.getWidth()
* size.getHeight() - this.tileSize);
for (int i = this.tileSize; i < size.getWidth(); i++) {
for (int j = this.tileSize; j < size.getHeight(); j++) {
this.emptyPos.add(new Position(i, j));
}
}
/*
* Insert the snake, starting with first one at the middle.
*
* In order not to make it too difficult for the client, snake tiles
* beyond the first one are added in the direction opposite to the
* starting direction, so that the head (index = 0) leads in the
* starting direction. The direction is temporarily flipped for this
* and restored to the starting direction afterwards.
*/
this.snakePos = new LinkedList<Position>();
this.addSnake(new Position((int) Math.ceil(size.getWidth() / 2),
(int) Math.ceil(size.getHeight() / 2)));
int initLength = SnakeConstants.getInitialSnakeLength();
if (initLength > 1) {
Direction oldDirection = this.direction;
this.direction = this.direction.getOpposite();
for (int i = 1; i < initLength; i++) {
this.addSnake(this.getNextSnakePos());
}
this.direction = oldDirection;
}
// Add starting snake-food.
this.addFood();
}
/**
* Update the direction of the snake.
*
* @param newDirection The wanted direction.
*/
public void updateDirection(Direction newDirection) {
// Don't change direction if it is opposite to current one or if it is
// contained by snake.
// This is one of the features of THIS snake.
if (!newDirection.isOpposite(this.direction)
|| snakePos.contains(snakePos.peek()
.moveDirection(newDirection))) {
this.direction = newDirection;
}
}
/**
* Updates the model periodically. Central game logic for snake.
*
* @throws GameOverException
* When game is over.
*/
public void tickUpdate() throws GameOverException {
// Get the new head-position of snake.
Position newHeadPos = this.getNextSnakePos();
// Check if there's food at the snakes head.
// If yes: Award client with score and add a new snake-food (if not
// possible or if reached victory condition -> game over).
// If not: Transfer the previous snake tail position to empty positions
// and remove head from empty positions.
if (isCollision(newHeadPos, this.currFoodPos)) {
this.score++;
if (this.emptyPos.isEmpty()
|| score == SnakeConstants.getVictoryCondition()) {
this.gameOver();
} else {
this.addFood();
}
} else {
this.emptyPos.add(this.snakePos.removeLast());
this.emptyPos.remove(newHeadPos);
}
// Game Over if snake is out of bounds.
if (isOutOfBounds(newHeadPos)) {
this.gameOver();
}
// Add head at new position.
this.snakePos.addFirst(newHeadPos);
}
/**
* Issues game-over by throwing GameOverException.
*
* @throws GameOverException
*/
private void gameOver() throws GameOverException {
gameOver = true;
throw new GameOverException(this.score);
}
/**
* Transfer first random position from empty ones to currFoodPos.
*/
private void addFood() {
// Randomly select an empty position to remove and set current food
// position to that one.
this.currFoodPos = this.emptyPos.get(this.rng.nextInt(this.emptyPos
.size()));
}
/**
* Helper method for adding new snake tiles to a position.
*
* @param pos
* The position in which to add a snake tile.
*/
private void addSnake(Position pos) {
this.emptyPos.remove(pos);
this.snakePos.add(pos);
}
/**
* Get next position of the snake.
*
* @return a new Position object representing next position.
*/
private Position getNextSnakePos() {
return this.snakePos.getFirst().moveDirection(this.direction);
}
/**
* @param newHeadPos
* @param otherPos
* @return true if Snake (newHeadPos) collides with another object
* (typically fruit) occupying otherPos
*/
private boolean isCollision(Position newHeadPos, Position otherPos) {
boolean collision = false;
if (otherPos.equals(newHeadPos)) {
collision = true;
}
return collision;
}
/**
* Check if Position pos is out of bounds on Android device.
*/
private boolean isOutOfBounds(Position pos) {
return pos.getX() < tileSize || pos.getY() < tileSize
|| pos.getY() > boardSize.getHeight() - tileSize
|| pos.getX() > boardSize.getWidth() - tileSize;
}
}
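/*
 * A minimal usage sketch (not part of the original file) showing how a caller
 * might drive SnakeModel through the methods above. It assumes Dimension exposes
 * a (width, height) constructor and that Direction provides compass-style
 * constants; both are assumptions, as only their use is shown in this file.
 * GameOverException carries the final score, as seen in gameOver().
 *
 * SnakeModel model = new SnakeModel(new Dimension(30, 20), Direction.EAST, new Random());
 * model.updateDirection(Direction.NORTH); // e.g. in response to user input
 * try {
 *     model.tickUpdate();                 // called periodically by the game loop
 * } catch (GameOverException e) {
 *     // stop the loop and read the score carried by the exception
 * }
 */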
|
package org.csstudio.scan.command;
import java.io.PrintStream;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import org.csstudio.scan.device.DeviceInfo;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/** Description of a scan server command
*
* <p>Used by the client to describe commands to the server,
* and returned by the server to describe elements of a Scan.
*
* <p>This class offers generic property access
* based on introspection, assuming that the command has
* suitable 'getter' and 'setter' methods.
* A property with ID "some_property" must have associated "getSomeProperty"
* and "setSomeProperty" methods, i.e. using a CamelCase version
* of the property ID.
*
* <p>The setter must accept an {@link Object} like {@link Double} or {@link Boolean},
* not a plain type like double or boolean.
*
* <p>The command must allow concurrent access to its
* properties. The getter and setter should <code>synchronize</code>,
* or the property needs to be <code>volatile</code>.
*
* <p>Supported property types:
* <ul>
* <li><code>Double</code> - Edited as number "5", "1e-20" etc.
* <li><code>String</code> - Edited as text
* <li><code>Boolean</code> - Edited as yes/no
* <li><code>String[]</code> - Edited as strings
* <li><code>DeviceInfo</code> - Edited as string, but editor suggests available device names
* <li><code>DeviceInfo[]</code> - Edited as strings, but editor suggests available device names
* <li><code>Enum</code> - Allows selection among the <code>toString()</code> values of the Enum
* <li><code>Object</code> - Edited as String, and if possible converted to Double
* </ul>
*
* @author Kay Kasemir
*/
@SuppressWarnings("nls")
abstract public class ScanCommand
{
/** Configurable properties of this command */
final private ScanCommandProperty[] properties;
/** Address of this command within command sequence.
* <p>The {@link CommandSequence} assigns addresses 0, 1, 2, ...
* to all commands in the sequence to allow identification
* of each command while the sequence is executed.
*/
private long address = -1;
/** Error handler script name */
private volatile String error_handler = "";
/** Initialize */
public ScanCommand()
{
final List<ScanCommandProperty> properties = new ArrayList<>();
configureProperties(properties);
this.properties = properties.toArray(new ScanCommandProperty[properties.size()]);
}
/** @return Name of the command, which is the base of the class name */
final public String getCommandName()
{
final String name = getClass().getName();
final int sep = name.lastIndexOf('.');
return name.substring(sep + 1);
}
/** A command with implementing class "DoSomeThingCommand"
* has an ID of "do_some_thing"
* @return ID of this command
*/
final public String getCommandID()
{
// Detected by Frederic Arnaud:
// Java 7 will split "DoSomeThingCommand" into [ "", "Do", "Some", "Thing", "Command" ]
// Java 8 will instead return [ "Do", "Some", "Thing", "Command" ]
final String[] sections = getCommandName().split("(?=[A-Z][a-z])");
final int start = (sections.length > 0 && sections[0].isEmpty()) ? 1 : 0;
final StringBuilder buf = new StringBuilder();
final int N = sections.length;
for (int i=start; i<N-1; ++i)
{
if (i > start)
buf.append("_");
buf.append(sections[i].toLowerCase());
}
return buf.toString();
}
/** Address of this command within the command sequence
*
* <p>Addresses are only well-defined for the top-level
* command sequence.
* The <code>LoopCommand</code> will increment the addresses
* within its body, but the <code>IncludeCommand</code>
* will not be able to assign addresses for the included
* scan or possibly further sub-includes.
*
* @return Address of this command or -1
*/
final public long getAddress()
{
return address;
}
/** Set the address of this command.
*
* <p>To be called by scan system, not end user code.
* Derived commands, i.e. custom commands that wrap a
* "body" of commands need to override and forward
* the address update to their embedded commands.
*
* @param address Address of this command within command sequence
* @return Address of next command
*/
public long setAddress(final long address)
{
this.address = address;
return address+1;
}
/** Declare properties of this command
*
* <p>Derived classes should add their properties and
* call the base implementation to declare inherited properties.
*
* @param properties List to which to add properties
*/
protected void configureProperties(final List<ScanCommandProperty> properties)
{
properties.add(
new ScanCommandProperty("error_handler", "Error Handler", String.class));
}
/** @return Descriptions of Properties for this command */
final public ScanCommandProperty[] getProperties()
{
return properties;
}
/** @param property_id ID of a property
* @return Property description or <code>null</code> if property ID is not supported
*/
final public ScanCommandProperty getPropertyDescription(final String property_id)
{
for (ScanCommandProperty property : getProperties())
if (property.getID().equals(property_id))
return property;
return null;
}
/** Set a command's property
* @param property_id ID of the property to set
* @param value New value
* @throws UnknownScanCommandPropertyException when there is no property with that ID and value type
*/
public void setProperty(final String property_id, final Object value) throws UnknownScanCommandPropertyException
{
for (ScanCommandProperty property : getProperties())
if (property.getID().equals(property_id))
{
setProperty(property, value);
return;
}
throw new UnknownScanCommandPropertyException("Unknown property ID " + property_id + " for " + getClass().getName());
}
/** Set a command's property
* @param property Property to set
* @param value New value
* @throws UnknownScanCommandPropertyException when there is no property with that ID and value type
*/
public void setProperty(final ScanCommandProperty property, Object value) throws UnknownScanCommandPropertyException
{
final String meth_name = getMethodName("set", property.getID());
// Type patching: DeviceInfo is read/set as String
Class<?> type = property.getType();
if (type == DeviceInfo.class)
type = String.class;
else if (type == DeviceInfo[].class)
type = String[].class;
// Try to adjust string if more specific type is required
try
{
if (value instanceof String)
{
if (type == Double.class)
value = Double.parseDouble(value.toString());
else if (type == Boolean.class)
value = Boolean.parseBoolean(value.toString());
else if (type.isEnum())
{
for (Object e : type.getEnumConstants())
if (e.toString().equals(value))
{
value = e;
break;
}
}
}
}
catch (Throwable ex)
{
throw new UnknownScanCommandPropertyException("Property ID " + property.getID() +
" requires value type " + type.getName() + " but received " + value.getClass().getName() + " for " + getClass().getName());
}
try
{
final Method method = getClass().getMethod(meth_name, type);
method.invoke(this, value);
}
catch (Throwable ex)
{ // Expect Exception or RuntimeException, but FindBugs complained about using Exception,
// so using Throwable
throw new UnknownScanCommandPropertyException("Unknown property ID " + property.getID() +
" with value type " + value.getClass().getName() + " for " + getClass().getName());
}
}
/** Get a command's property
* @param property_id ID of the property to get
* @return Value
* @throws UnknownScanCommandPropertyException when there is no property with that ID and value type
*/
public Object getProperty(final String property_id) throws UnknownScanCommandPropertyException
{
for (ScanCommandProperty property : getProperties())
if (property.getID().equals(property_id))
return getProperty(property);
throw new UnknownScanCommandPropertyException("Unknown property ID " + property_id + " for " + getClass().getName());
}
/** Get a command's property
* @param property Property to get
* @return Value
* @throws UnknownScanCommandPropertyException when there is no property with that ID and value type
*/
public Object getProperty(final ScanCommandProperty property) throws UnknownScanCommandPropertyException
{
final String meth_name = getMethodName("get", property.getID());
try
{
final Method method = getClass().getMethod(meth_name);
return method.invoke(this);
}
catch (Exception ex)
{
throw new UnknownScanCommandPropertyException("Unknown property ID " + property.getID() + " for " + getClass().getName());
}
}
/** Construct method name
* @param get_set Method prefix "get" or "set"
* @param property_id Property ID like "device_name"
* @return Method name like "setDeviceName"
*/
private String getMethodName(final String get_set, final String property_id)
{
final String[] sections = property_id.split("_");
final StringBuilder result = new StringBuilder(get_set);
for (String sec : sections)
{
result.append(sec.substring(0, 1).toUpperCase());
result.append(sec.substring(1));
}
return result.toString();
}
/** @return Name of error handler class */
public String getErrorHandler()
{
return error_handler;
}
/** @param error_handler Name of the error handler class */
public void setErrorHandler(final String error_handler)
{
this.error_handler = error_handler;
}
/** Write the command (and its sub-commands) to XML document.
*
* <p>A command called AbcCommand writes itself as a tag "abc"
* so that the {@link XMLCommandReader} can later determine
* which class to use for reading the command back from XML.
*
* <p>This method creates the overall XML element for the command
* and calls <code>addXMLElements()</code> for the content.
* Derived classes should update <code>addXMLElements()</code>.
*
* @param dom {@link Document}
* @param root Where to add content for this command
* @see ScanCommand#addXMLElements(Document, Element)
*/
final public void writeXML(final Document dom, final Element root)
{
final Element command_element = dom.createElement(getCommandID());
root.appendChild(command_element);
addXMLElements(dom, command_element);
}
/** Add XML elements for command (and its sub-commands) to document.
*
* <p>Derived classes should call parent implementation.
*
* @param dom {@link Document}
* @param command_element DOM {@link Element} for this command
*/
public void addXMLElements(final Document dom, final Element command_element)
{
if (! error_handler.isEmpty())
{
final Element element = dom.createElement("error_handler");
element.appendChild(dom.createTextNode(error_handler));
command_element.appendChild(element);
}
}
/** Read command parameters from XML element
*
* <p>Derived classes must call base class implementation
* to read inherited properties.
*
* @param factory ScanCommandFactory to use in case inner scan commands,
* for example a loop body, need to be created
* @param element
* @throws Exception on error, for example missing essential data
*/
public void readXML(final SimpleScanCommandFactory factory, final Element element) throws Exception
{
setErrorHandler(DOMHelper.getSubelementString(element, "error_handler", ""));
}
/** Write indentation
* @param out Where to print
* @param level Indentation level
*/
final protected void writeIndent(final PrintStream out, final int level)
{
for (int i=0; i<level; ++i)
out.print(" ");
}
/** @return Debug representation.
* Derived classes should provide their command name with properties
*/
@Override
public String toString()
{
return getClass().getName();
}
}
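/*
 * A minimal sketch of a derived command, not part of the original code base; the
 * class name DelayCommand and its "seconds" property are hypothetical. It illustrates
 * the property contract described in the ScanCommand javadoc: the property ID "seconds"
 * maps to getSeconds()/setSeconds(Double), the setter takes the wrapper type (not a
 * plain double), the field is volatile for concurrent access, and configureProperties()
 * declares the property before delegating to the base class for inherited properties.
 * getCommandID() would report "delay" and writeXML() would emit a <delay> element.
 */
class DelayCommand extends ScanCommand
{
    /** Delay in seconds; volatile because properties may be accessed concurrently */
    private volatile Double seconds = 1.0;

    @Override
    protected void configureProperties(final List<ScanCommandProperty> properties)
    {
        properties.add(new ScanCommandProperty("seconds", "Delay Seconds", Double.class));
        super.configureProperties(properties);
    }

    /** @return Delay in seconds */
    public Double getSeconds()
    {
        return seconds;
    }

    /** @param seconds Delay in seconds */
    public void setSeconds(final Double seconds)
    {
        this.seconds = seconds;
    }
}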
|
package test.http.router.handler;
import com.firefly.$;
import com.firefly.client.http2.*;
import com.firefly.codec.http2.frame.DataFrame;
import com.firefly.codec.http2.frame.HeadersFrame;
import com.firefly.codec.http2.frame.SettingsFrame;
import com.firefly.codec.http2.model.*;
import com.firefly.codec.http2.stream.*;
import com.firefly.server.http2.HTTP2Server;
import com.firefly.server.http2.ServerHTTPHandler;
import com.firefly.server.http2.ServerSessionListener;
import com.firefly.utils.concurrent.Callback;
import com.firefly.utils.concurrent.FuturePromise;
import com.firefly.utils.io.BufferUtils;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.Phaser;
import static com.firefly.utils.io.BufferUtils.toBuffer;
import static org.hamcrest.Matchers.is;
/**
* @author Pengtao Qiu
*/
public class TestH2cUpgrade extends AbstractHTTPHandlerTest {
@Test
public void test() throws Exception {
Phaser phaser = new Phaser(5);
HTTP2Server server = createServer();
HTTP2Client client = createClient(phaser);
phaser.arriveAndAwaitAdvance();
server.stop();
client.stop();
}
private static class TestH2cHandler extends ClientHTTPHandler.Adapter {
protected final ByteBuffer[] buffers;
protected final List<ByteBuffer> contentList = new CopyOnWriteArrayList<>();
public TestH2cHandler() {
buffers = null;
}
public TestH2cHandler(ByteBuffer[] buffers) {
this.buffers = buffers;
}
@Override
public void continueToSendData(MetaData.Request request, MetaData.Response response, HTTPOutputStream output,
HTTPConnection connection) {
System.out.println("client received 100 continue");
Assert.assertTrue(buffers != null);
try (HTTPOutputStream out = output) {
for (ByteBuffer buf : buffers) {
out.write(buf);
}
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public boolean content(ByteBuffer item, MetaData.Request request, MetaData.Response response,
HTTPOutputStream output,
HTTPConnection connection) {
System.out.println("client received data: " + BufferUtils.toUTF8String(item));
contentList.add(item);
return false;
}
}
private HTTP2Client createClient(Phaser phaser) throws Exception {
final HTTP2Configuration http2Configuration = new HTTP2Configuration();
http2Configuration.setFlowControlStrategy("simple");
http2Configuration.getTcpConfiguration().setTimeout(60 * 1000);
HTTP2Client client = new HTTP2Client(http2Configuration);
FuturePromise<HTTPClientConnection> promise = new FuturePromise<>();
client.connect(host, port, promise);
final HTTPClientConnection httpConnection = promise.get();
HTTPClientRequest request = new HTTPClientRequest("GET", "/index");
Map<Integer, Integer> settings = new HashMap<>();
settings.put(SettingsFrame.HEADER_TABLE_SIZE, http2Configuration.getMaxDynamicTableSize());
settings.put(SettingsFrame.INITIAL_WINDOW_SIZE, http2Configuration.getInitialStreamSendWindow());
SettingsFrame settingsFrame = new SettingsFrame(settings, false);
FuturePromise<HTTP2ClientConnection> http2Promise = new FuturePromise<>();
httpConnection.upgradeHTTP2(request, settingsFrame, http2Promise, new TestH2cHandler() {
@Override
public boolean messageComplete(MetaData.Request request, MetaData.Response response,
HTTPOutputStream output,
HTTPConnection connection) {
System.out.println("client received frame: " + response.getStatus() + ", " + response.getReason());
System.out.println(response.getFields());
System.out.println("
Assert.assertThat(response.getStatus(), is(HttpStatus.SWITCHING_PROTOCOLS_101));
Assert.assertThat(response.getFields().get(HttpHeader.UPGRADE), is("h2c"));
phaser.arrive();
return true;
}
});
HTTP2ClientConnection clientConnection = http2Promise.get();
HttpFields fields = new HttpFields();
fields.put(HttpHeader.USER_AGENT, "Firefly Client 1.0");
MetaData.Request post = new MetaData.Request("POST", HttpScheme.HTTP,
new HostPortHttpField(host + ":" + port),
"/data", HttpVersion.HTTP_1_1, fields);
clientConnection.sendRequestWithContinuation(post, new TestH2cHandler(new ByteBuffer[]{
ByteBuffer.wrap("hello world!".getBytes("UTF-8")),
ByteBuffer.wrap("big hello world!".getBytes("UTF-8"))}) {
@Override
public boolean messageComplete(MetaData.Request request, MetaData.Response response,
HTTPOutputStream output,
HTTPConnection connection) {
return dataComplete(phaser, BufferUtils.toString(contentList), response);
}
});
MetaData.Request get = new MetaData.Request("GET", HttpScheme.HTTP,
new HostPortHttpField(host + ":" + port),
"/test2", HttpVersion.HTTP_1_1, new HttpFields());
clientConnection.send(get, new TestH2cHandler() {
@Override
public boolean messageComplete(MetaData.Request request, MetaData.Response response,
HTTPOutputStream output,
HTTPConnection connection) {
System.out.println("client received frame: " + response.getStatus() + ", " + response.getReason());
System.out.println(response.getFields());
System.out.println("
Assert.assertThat(response.getStatus(), is(HttpStatus.NOT_FOUND_404));
phaser.arrive();
return true;
}
});
MetaData.Request post2 = new MetaData.Request("POST", HttpScheme.HTTP,
new HostPortHttpField(host + ":" + port),
"/data", HttpVersion.HTTP_1_1, fields);
clientConnection.send(post2, new ByteBuffer[]{
ByteBuffer.wrap("test data 2".getBytes("UTF-8")),
ByteBuffer.wrap("finished test data 2".getBytes("UTF-8"))}, new TestH2cHandler() {
@Override
public boolean messageComplete(MetaData.Request request, MetaData.Response response,
HTTPOutputStream output,
HTTPConnection connection) {
return dataComplete(phaser, BufferUtils.toString(contentList), response);
}
});
return client;
}
public boolean dataComplete(Phaser phaser, String content, MetaData.Response response) {
System.out.println("client received frame: " + response.getStatus() + ", " + response.getReason());
System.out.println(response.getFields());
System.out.println("
Assert.assertThat(response.getStatus(), is(HttpStatus.OK_200));
Assert.assertThat(content, is("Receive data stream successful. Thank you!"));
phaser.arrive();
return true;
}
private HTTP2Server createServer() {
final HTTP2Configuration http2Configuration = new HTTP2Configuration();
http2Configuration.setFlowControlStrategy("simple");
http2Configuration.getTcpConfiguration().setTimeout(60 * 1000);
HTTP2Server server = new HTTP2Server(host, port, http2Configuration, new ServerHTTPHandler.Adapter() {
@Override
public boolean accept100Continue(MetaData.Request request, MetaData.Response response, HTTPOutputStream output,
HTTPConnection connection) {
System.out.println("received expect continue ");
return false;
}
@Override
public boolean content(ByteBuffer item, MetaData.Request request, MetaData.Response response, HTTPOutputStream output,
HTTPConnection connection) {
System.out.println("received data: " + BufferUtils.toString(item, StandardCharsets.UTF_8));
return false;
}
@Override
public boolean messageComplete(MetaData.Request request, MetaData.Response response, HTTPOutputStream outputStream,
HTTPConnection connection) {
HttpURI uri = request.getURI();
System.out.println("message complete: " + uri);
System.out.println(request.getFields());
System.out.println("
switch (uri.getPath()) {
case "/index":
response.setStatus(HttpStatus.Code.OK.getCode());
response.setReason(HttpStatus.Code.OK.getMessage());
try (HTTPOutputStream output = outputStream) {
output.writeWithContentLength(toBuffer("receive initial stream successful", StandardCharsets.UTF_8));
} catch (IOException e) {
e.printStackTrace();
}
break;
case "/data":
response.setStatus(HttpStatus.Code.OK.getCode());
response.setReason(HttpStatus.Code.OK.getMessage());
try (HTTPOutputStream output = outputStream) {
output.write(toBuffer("Receive data stream successful. ", StandardCharsets.UTF_8));
output.write(toBuffer("Thank you!", StandardCharsets.UTF_8));
} catch (IOException e) {
e.printStackTrace();
}
break;
default:
response.setStatus(HttpStatus.Code.NOT_FOUND.getCode());
response.setReason(HttpStatus.Code.NOT_FOUND.getMessage());
try (HTTPOutputStream output = outputStream) {
output.writeWithContentLength(toBuffer(uri.getPath() + " not found", StandardCharsets.UTF_8));
} catch (IOException e) {
e.printStackTrace();
}
break;
}
return true;
}
});
server.start();
return server;
}
@Test
public void testLowLevelAPI() throws Exception {
Phaser phaser = new Phaser(2);
HTTP2Server server = createServerLowLevelAPI();
HTTP2Client client = createClientLowLevelClient(phaser);
phaser.arriveAndAwaitAdvance();
server.stop();
client.stop();
}
public HTTP2Server createServerLowLevelAPI() {
final HTTP2Configuration http2Configuration = new HTTP2Configuration();
http2Configuration.setFlowControlStrategy("simple");
http2Configuration.getTcpConfiguration().setTimeout(60 * 1000);
HTTP2Server server = new HTTP2Server(host, port, http2Configuration, new ServerSessionListener.Adapter() {
@Override
public Map<Integer, Integer> onPreface(Session session) {
System.out.println("session preface: " + session);
final Map<Integer, Integer> settings = new HashMap<>();
settings.put(SettingsFrame.HEADER_TABLE_SIZE, http2Configuration.getMaxDynamicTableSize());
settings.put(SettingsFrame.INITIAL_WINDOW_SIZE, http2Configuration.getInitialStreamSendWindow());
return settings;
}
@Override
public Stream.Listener onNewStream(Stream stream, HeadersFrame frame) {
System.out.println("Server new stream, " + frame.getMetaData() + "|" + stream);
MetaData metaData = frame.getMetaData();
Assert.assertTrue(metaData.isRequest());
final MetaData.Request request = (MetaData.Request) metaData;
if (frame.isEndStream()) {
if (request.getURI().getPath().equals("/index")) {
MetaData.Response response = new MetaData.Response(HttpVersion.HTTP_2, 200, new HttpFields());
HeadersFrame headersFrame = new HeadersFrame(stream.getId(), response, null, false);
DataFrame dataFrame = new DataFrame(stream.getId(), BufferUtils.toBuffer("The server received message"), true);
stream.headers(headersFrame, Callback.NOOP);
stream.data(dataFrame, Callback.NOOP);
}
}
List<ByteBuffer> contentList = new CopyOnWriteArrayList<>();
return new Stream.Listener.Adapter() {
@Override
public void onHeaders(Stream stream, HeadersFrame frame) {
System.out.println("Server stream on headers " + frame.getMetaData() + "|" + stream);
}
@Override
public void onData(Stream stream, DataFrame frame, Callback callback) {
System.out.println("Server stream on data: " + frame);
contentList.add(frame.getData());
if (frame.isEndStream()) {
MetaData.Response response = new MetaData.Response(HttpVersion.HTTP_2, 200, new HttpFields());
HeadersFrame responseFrame = new HeadersFrame(stream.getId(), response, null, false);
DataFrame dataFrame = new DataFrame(stream.getId(), BufferUtils.toBuffer("The server received data"), true);
System.out.println("Server session on data end: " + BufferUtils.toString(contentList));
stream.headers(responseFrame, Callback.NOOP);
stream.data(dataFrame, Callback.NOOP);
}
callback.succeeded();
}
};
}
@Override
public void onAccept(Session session) {
System.out.println("accept a new session " + session);
}
}, new ServerHTTPHandler.Adapter());
server.start();
return server;
}
public HTTP2Client createClientLowLevelClient(Phaser phaser) throws Exception {
final HTTP2Configuration http2Configuration = new HTTP2Configuration();
http2Configuration.setFlowControlStrategy("simple");
http2Configuration.getTcpConfiguration().setTimeout(60 * 1000);
HTTP2Client client = new HTTP2Client(http2Configuration);
FuturePromise<HTTPClientConnection> promise = new FuturePromise<>();
client.connect(host, port, promise);
HTTPConnection connection = promise.get();
Assert.assertThat(connection.getHttpVersion(), is(HttpVersion.HTTP_1_1));
final HTTP1ClientConnection httpConnection = (HTTP1ClientConnection) connection;
HTTPClientRequest request = new HTTPClientRequest("GET", "/index");
Map<Integer, Integer> settings = new HashMap<>();
settings.put(SettingsFrame.HEADER_TABLE_SIZE, http2Configuration.getMaxDynamicTableSize());
settings.put(SettingsFrame.INITIAL_WINDOW_SIZE, http2Configuration.getInitialStreamSendWindow());
SettingsFrame settingsFrame = new SettingsFrame(settings, false);
FuturePromise<HTTP2ClientConnection> http2promise = new FuturePromise<>();
FuturePromise<Stream> initStream = new FuturePromise<>();
httpConnection.upgradeHTTP2(request, settingsFrame, http2promise, initStream, new Stream.Listener.Adapter() {
@Override
public void onHeaders(Stream stream, HeadersFrame frame) {
System.out.println($.string.replace("client stream {} received init headers: {}", stream.getId(), frame.getMetaData()));
}
}, new Session.Listener.Adapter() {
@Override
public Map<Integer, Integer> onPreface(Session session) {
System.out.println($.string.replace("client preface: {}", session));
Map<Integer, Integer> settings = new HashMap<>();
settings.put(SettingsFrame.HEADER_TABLE_SIZE, http2Configuration.getMaxDynamicTableSize());
settings.put(SettingsFrame.INITIAL_WINDOW_SIZE, http2Configuration.getInitialStreamSendWindow());
return settings;
}
@Override
public void onFailure(Session session, Throwable failure) {
failure.printStackTrace();
}
}, new ClientHTTPHandler.Adapter());
HTTP2ClientConnection clientConnection = http2promise.get();
Assert.assertThat(clientConnection.getHttpVersion(), is(HttpVersion.HTTP_2));
HttpFields fields = new HttpFields();
fields.put(HttpHeader.ACCEPT, "text/html");
fields.put(HttpHeader.USER_AGENT, "Firefly Client 1.0");
fields.put(HttpHeader.CONTENT_LENGTH, "28");
MetaData.Request metaData = new MetaData.Request("POST", HttpScheme.HTTP,
new HostPortHttpField(host + ":" + port), "/data", HttpVersion.HTTP_2, fields);
FuturePromise<Stream> streamPromise = new FuturePromise<>();
clientConnection.getHttp2Session().newStream(new HeadersFrame(metaData, null, false), streamPromise,
new Stream.Listener.Adapter() {
@Override
public void onHeaders(Stream stream, HeadersFrame frame) {
System.out.println($.string.replace("client received headers: {}", frame.getMetaData()));
}
@Override
public void onData(Stream stream, DataFrame frame, Callback callback) {
System.out.println($.string.replace("client received data: {}, {}", BufferUtils.toUTF8String(frame.getData()), frame));
if (frame.isEndStream()) {
phaser.arrive();
}
callback.succeeded();
}
});
final Stream clientStream = streamPromise.get();
System.out.println("client stream id: " + clientStream.getId());
clientStream.data(new DataFrame(clientStream.getId(),
toBuffer("hello world!", StandardCharsets.UTF_8), false), new Callback() {
@Override
public void succeeded() {
clientStream.data(new DataFrame(clientStream.getId(),
toBuffer("big hello world!", StandardCharsets.UTF_8), true), Callback.NOOP);
}
});
return client;
}
}
|
// IT Innovation Centre of Gamma House, Enterprise Road,
// Chilworth Science Park, Southampton, SO16 7NS, UK.
// or reproduced in whole or in part in any manner or form or in or
// on any media by any person other than in accordance with the terms
// of the Licence Agreement supplied with the software, or otherwise
// PURPOSE, except where stated in the Licence Agreement supplied with
// the software.
// Created Date : 2014-07-17
// Created for Project : Experimedia
package uk.ac.soton.itinnovation.experimedia.arch.edm.test.prov;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.rmi.AlreadyBoundException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.LinkedList;
import java.util.Properties;
import java.util.Random;
import java.util.UUID;
import java.util.zip.DataFormatException;
import javax.xml.datatype.DatatypeConfigurationException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.provenance.EDMActivity;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.provenance.EDMAgent;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.provenance.EDMEntity;
import uk.ac.soton.itinnovation.experimedia.arch.ecc.common.dataModel.provenance.EDMProvFactory;
public class ExperimentDataGenerator {
//how many seconds to wait until reading the next log. Set to 0 for max speed
public static final int SPEED = 1;
//located in resources folder
public static final String LOGFILE = "sample.log";
private LinkedList<String> rawlog = new LinkedList<String>();
private EDMProvFactory factory;
private static final Properties props = new Properties();
private static final Logger logger = LoggerFactory.getLogger(ExperimentDataGenerator.class);
public ExperimentDataGenerator() {
logger.info("Starting ExperimentDataGenerator");
try {
logger.info("Loading properties file");
props.load(ExperimentDataGeneratorTest.class.getClassLoader().getResourceAsStream("prov.properties"));
} catch (IOException e) {
logger.error("Error loading properties file", e);
}
}
/**
* Reads the logfile and saves contents in memory
*/
public void readLog() {
try {
String logfilePath = ExperimentDataGeneratorTest.class.getClassLoader().getResource(LOGFILE).getPath();
FileReader fr = new FileReader(new File(logfilePath));
BufferedReader br = new BufferedReader(fr);
String line;
while((line = br.readLine()) != null) {
rawlog.add(line.trim());
}
} catch (IOException e) {
logger.error("Error reading from logfile " + LOGFILE, e);
}
}
/**
* Parses the in-memory log and creates prov statements
*/
public void parseLog() {
try {
//init factory
factory = EDMProvFactory.getInstance();
factory.addOntology("foaf", "http://xmlns.com/foaf/0.1/");
factory.addOntology("sioc", "http://rdfs.org/sioc/ns
//add ontology namespaces
factory.addOntology("eee", "http://experimedia.eu/ontologies/ExperimediaExperimentExplorer
factory.addOntology("ski", "http://www.semanticweb.org/sw/ontologies/skiing
//create bob
EDMAgent bob = factory.createAgent("agent_" + UUID.randomUUID(), "Bob");
bob.addOwlClass(factory.getNamespaceForPrefix("foaf") + "Person");
bob.addOwlClass(factory.getNamespaceForPrefix("eee") + "Participant");
//TODO: link bob to his metric entity representation
//create app
EDMActivity useApp = bob.startActivity("useAppActivity_" + UUID.randomUUID(), "Use app", "1387531200");
EDMEntity appEntity = useApp.generateEntity("entity_" + UUID.randomUUID(), "App", "1387531201");
appEntity.addOwlClass(factory.getNamespaceForPrefix("eee") + "Application");
EDMAgent appAgent = factory.createAgent(appEntity.getIri(), appEntity.getFriendlyName());
appAgent.addTriple(factory.getNamespaceForPrefix("owl") + "sameAs", appEntity.getIri());
appAgent.actOnBehalfOf(bob);
//create services
EDMEntity twitterService = factory.createEntity("entity_" + UUID.randomUUID(), "Twitter service");
twitterService.addOwlClass(factory.getNamespaceForPrefix("eee") + "Service");
EDMEntity messageService = factory.createEntity("entity_" + UUID.randomUUID(), "Message service");
messageService.addOwlClass(factory.getNamespaceForPrefix("eee") + "Service");
EDMEntity babylonService = factory.createEntity("entity_" + UUID.randomUUID(), "Babylon service");
babylonService.addOwlClass(factory.getNamespaceForPrefix("eee") + "Service");
//create skilifts
EDMEntity[] skilifts = new EDMEntity[5];
EDMEntity skilift1 = factory.createEntity("skilift-37", "Fritz Blitz");
skilift1.addOwlClass(factory.getNamespaceForPrefix("ski") + "Skilift");
skilifts[0] = skilift1;
EDMEntity skilift2 = factory.createEntity("skilift-31", "Sonneckbahn");
skilift2.addOwlClass(factory.getNamespaceForPrefix("ski") + "Skilift");
skilifts[1] = skilift2;
EDMEntity skilift3 = factory.createEntity("skilift-33", "Märchenwiesebahn");
skilift3.addOwlClass(factory.getNamespaceForPrefix("ski") + "Skilift");
skilifts[2] = skilift3;
EDMEntity skilift4 = factory.createEntity("skilift-27", "Lärchkogelbahn");
skilift4.addOwlClass(factory.getNamespaceForPrefix("ski") + "Skilift");
skilifts[3] = skilift4;
EDMEntity skilift5 = factory.createEntity("skilift-35", "Weitmoos-Tellerlift");
skilift5.addOwlClass(factory.getNamespaceForPrefix("ski") + "Skilift");
skilifts[4] = skilift5;
LinkedList<EDMEntity> messages = new LinkedList<EDMEntity>();
LinkedList<EDMEntity> tweets = new LinkedList<EDMEntity>();
Random rand = new Random();
for (Log log = getNextLog(); log !=null; log = getNextLog()) {
//random event: throw dice
double random = rand.nextDouble();
//logger.debug("random: " + random);
if (random>0.95) {
//only makes sense while not skiing
if ((new Double(log.speed))<5) {
//use random skilift
EDMActivity a = useRealWorldEntity(bob, skilifts[rand.nextInt(skilifts.length)], log.timestamp.toString());
a.addOwlClass(factory.getNamespaceForPrefix("ski") + "UsingSkiliftActivity");
//create some random liftwaiting time data + QoE
EDMEntity qoe = createDataAtService(bob, appAgent, appEntity, babylonService, "rate skilift", log.timestamp.toString());
//TODO: link QoE entity to metrics
}
} else if (random<=0.95 && random>0.9) {
//navigate app
navigateClient(bob, appEntity, log.timestamp.toString());
} else if (random<=0.9 && random>0.85) {
//read message
if (messages.size()>0) {
useDataOnClient(bob, appEntity, messages.pollFirst(), "message", log.timestamp.toString());
}
} else if (random<=0.85 && random>0.8) {
//create data on client
EDMEntity photo = createDataOnClient(bob, appEntity, "Photo", log.timestamp.toString());
} else if (random<=0.8 && random>0.75) {
//read tweet
if (tweets.size()>0) {
useDataOnClient(bob, appEntity, tweets.pollFirst(), "tweet", log.timestamp.toString());
}
} else if (random<=0.7 && random>0.65) {
//receive message
messages.add(retrieveDataFromService(bob, appAgent, appEntity, messageService, "message", log.timestamp.toString()));
//TODO: put metric data here, link to relevant entities
} else if (random<=0.6 && random>0.55) {
//receive tweet
tweets.add(retrieveDataFromService(bob, appAgent, appEntity, twitterService, "tweet", log.timestamp.toString()));
//TODO: put metric data here, link to relevant entities
} else if (random<=0.55 && random>0.5) {
//tweet
createDataAtService(bob, appAgent, appEntity, twitterService, "tweet", log.timestamp.toString());
} else {
//do nothing
}
}
useApp.invalidateEntity(appEntity);
bob.stopActivity(useApp, "1387533300");
logger.info("finished parsing log");
} catch (Exception e) {
logger.error("Error filling EDMProvFactory with data", e);
}
}
public EDMActivity useRealWorldEntity(EDMAgent participant, EDMEntity entity, String timestamp)
throws DataFormatException, DatatypeConfigurationException, AlreadyBoundException {
EDMActivity a = participant.doDiscreteActivity("useSkiliftActivity_" + UUID.randomUUID(), "Use " + entity.getFriendlyName(), timestamp);
a.useEntity(entity);
return a;
}
public EDMEntity createDataOnClient(EDMAgent participant, EDMEntity client, String dataName, String timestamp)
throws DataFormatException, DatatypeConfigurationException, AlreadyBoundException, NoSuchFieldException {
String dName = dataName.substring(0,1).toUpperCase() + dataName.replaceAll(" ", "").substring(1);
EDMActivity a = participant.doDiscreteActivity("Create" + dName + "Activity_" + UUID.randomUUID(), "Create " + dataName, timestamp);
a.useEntity(client);
EDMEntity data = a.generateEntity("Content_" + UUID.randomUUID(), dataName);
data.addOwlClass(factory.getNamespaceForPrefix("eee") + "Content");
return data;
}
public void useDataOnClient(EDMAgent participant, EDMEntity client, EDMEntity data, String dataName, String timestamp)
throws DataFormatException, DatatypeConfigurationException, AlreadyBoundException {
String dName = dataName.substring(0,1).toUpperCase() + dataName.replaceAll(" ", "").substring(1);
EDMActivity a = participant.doDiscreteActivity("Use" + dName + "Activity_" + UUID.randomUUID(), "Use " + dataName, timestamp);
a.useEntity(data);
a.useEntity(client);
}
public void navigateClient(EDMAgent participant, EDMEntity client, String timestamp)
throws DataFormatException, DatatypeConfigurationException, AlreadyBoundException {
EDMActivity a = participant.doDiscreteActivity("NavigateClientActitivy_" + UUID.randomUUID(), "Navigate " + client.getFriendlyName(), timestamp);
a.useEntity(client);
}
public EDMEntity retrieveDataFromService(EDMAgent participant, EDMAgent clientAgent, EDMEntity clientEntity, EDMEntity service, String activityName, String timestamp)
throws DataFormatException, DatatypeConfigurationException, AlreadyBoundException, NoSuchFieldException {
String actName = activityName.substring(0,1).toUpperCase() + activityName.substring(1);
EDMActivity a = participant.doDiscreteActivity("Receive" + actName + "Activity_" + UUID.randomUUID(), "Receive " + activityName, timestamp);
a.useEntity(clientEntity);
EDMActivity b = clientAgent.doDiscreteActivity("Retrieve" + actName + "Activity_" + UUID.randomUUID(), "Retrieve " + activityName, timestamp);
a.informActivity(b);
b.useEntity(service);
EDMEntity data = b.generateEntity(actName + "Data_" + UUID.randomUUID(), actName + " data", timestamp);
data.addOwlClass(factory.getNamespaceForPrefix("eee") + "Content");
a.useEntity(data);
return data;
}
public EDMEntity createDataAtService(EDMAgent participant, EDMAgent clientAgent, EDMEntity clientEntity, EDMEntity service, String activityName, String timestamp)
throws DataFormatException, DatatypeConfigurationException, AlreadyBoundException, NoSuchFieldException {
String actName = activityName.substring(0,1).toUpperCase() + activityName.substring(1);
EDMActivity a = participant.doDiscreteActivity("Create" + actName + "Activity_" + UUID.randomUUID(), participant.getFriendlyName() + " tweets", timestamp);
a.useEntity(clientEntity);
EDMEntity data = a.generateEntity(actName + "_" + UUID.randomUUID(), participant.getFriendlyName() + "'s " + activityName, timestamp);
data.addOwlClass(factory.getNamespaceForPrefix("eee") + "Content");
EDMActivity b = clientAgent.startActivity("Send" + actName + "Activity_" + UUID.randomUUID(), "Send " + activityName + " to server", timestamp);
b.useEntity(service);
b.useEntity(data);
a.informActivity(b);
return data;
}
private Log getNextLog() {
//sleep if pause required
if (SPEED>0) {
try {
Thread.sleep(SPEED);
} catch (InterruptedException e) {
logger.warn("Error slowing parser down for more realistic simulation", e);
}
}
Log log = null;
//only process complete logs, abort otherwise
if (rawlog.size()>2) {
String[] lines = new String[3];
lines[0] = rawlog.pollFirst();
String date1 = lines[0].substring(0,19);
//check date
//as expected, append next line
if (rawlog.peekFirst().startsWith(date1)) {
lines[1] = rawlog.pollFirst();
//check next date
//as expected, append last line
if(rawlog.peekFirst().startsWith(date1)) {
lines[2] = rawlog.pollFirst();
//unexpected, we must have started at line 2 instead of line 1
} else {
lines[0] = rawlog.pollFirst();
lines[1] = rawlog.pollFirst();
lines[2] = rawlog.pollFirst();
}
//unexpected - date has changed, so we must be at the first one of a block of 3
} else {
lines[0] = rawlog.pollFirst();
lines[1] = rawlog.pollFirst();
lines[2] = rawlog.pollFirst();
}
if (lines[0]!=null && lines[1]!=null && lines[2]!=null) {
log = new Log(lines);
}
}
return log;
}
// PRIVATE CLASS
private class Log {
public String[] lines;
public String date;
public String time;
public Long timestamp;
public String longitude = null;
public String latitude = null;
public String altitude = null;
public String speed = null;
public String temperature = null;
Log(String[] lines) {
this.lines = lines;
//date/time
this.date = lines[0].substring(0,10);
this.time = lines[0].substring(11,19);
//logger.debug(this.date + ", " + this.time);
//timestamp
try {
SimpleDateFormat formatter = new SimpleDateFormat("dd-MM-yyyy HH:mm:ss");
Date fullDate = formatter.parse(this.date + " " + this.time);
this.timestamp = fullDate.getTime()/1000L;
} catch (ParseException e) {
logger.error("Error parsing date to create timestamp", e);
}
//check for long/lat/alt
if (lines[0].split(",").length>2) {
this.latitude = lines[0].split(",")[2];
if (!this.latitude.matches("\\d{1,2}\\.\\d{1,15}")) {
this.latitude = null;
}
}
if (lines[0].split(",").length>3) {
this.longitude = lines[0].split(",")[3];
if (!this.longitude.matches("\\d{1,2}\\.\\d{1,15}")) {
this.longitude = null;
}
}
if (lines[0].split(",").length>4) {
this.altitude = lines[0].split(",")[4];
if (!this.altitude.matches("\\d{1,4}\\.\\d{1,2}")) {
this.altitude = null;
}
}
//logger.debug("lat: " + this.latitude + ", long: " + this.longitude + ", alt: " + this.altitude);
//speed/temp
if (lines[1].split(",").length>2) {
this.speed = lines[1].split(",")[2];
}
if (lines[2].split(",").length>2) {
this.temperature = lines[2].split(",")[2];
}
//logger.debug("speed: " + this.speed + ", temperature: " + this.temperature);
}
@Override
public String toString() {
return this.lines[0] + "\n" + this.lines[1] + "\n" + this.lines[2];
}
}
//GETTERS/SETTERS//////////////////////////////////////////////////////////////////////////////
public LinkedList<String> getRawlog() {
return rawlog;
}
public EDMProvFactory getFactory() {
return factory;
}
public static Properties getProps() {
return props;
}
}
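/*
 * A minimal usage sketch (not part of the original class): read the bundled sample
 * log into memory, replay it into PROV statements, and then retrieve the populated
 * factory. How the EDMProvFactory contents are exported or asserted on is left to
 * the surrounding test harness and is not shown here.
 *
 * ExperimentDataGenerator generator = new ExperimentDataGenerator();
 * generator.readLog();   // loads "sample.log" from the classpath into memory
 * generator.parseLog();  // replays the log, creating agents, activities and entities
 * EDMProvFactory factory = generator.getFactory();
 */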
|
package org.innovateuk.ifs.documentation;
import org.innovateuk.ifs.competition.builder.CompetitionResourceBuilder;
import org.innovateuk.ifs.competition.publiccontent.resource.FundingType;
import org.innovateuk.ifs.competition.resource.GrantTermsAndConditionsResource;
import org.innovateuk.ifs.file.resource.FileEntryResource;
import org.springframework.restdocs.payload.FieldDescriptor;
import java.time.ZonedDateTime;
import static com.google.common.primitives.Longs.asList;
import static java.util.Collections.singleton;
import static org.innovateuk.ifs.competition.builder.CompetitionResourceBuilder.newCompetitionResource;
import static org.springframework.restdocs.payload.PayloadDocumentation.fieldWithPath;
public class CompetitionResourceDocs {
public static final FieldDescriptor[] competitionResourceFields = {
fieldWithPath("id").description("Id of the competitionResource").optional(),
fieldWithPath("name").description("name of the competition").optional(),
fieldWithPath("startDate").description("date the competition opens for submissions").optional(),
fieldWithPath("endDate").description("date the submissions phase of the competition closes").optional(),
fieldWithPath("registrationDate").description("date on which the registration closes").optional(),
fieldWithPath("assessorAcceptsDate").description("date by which assessors should accept or reject invitations to assess applications").optional(),
fieldWithPath("assessorDeadlineDate").description("date by which assessors should submit their application feedback").optional(),
fieldWithPath("assessorBriefingDate").description("date on which assessors will be briefed on the competition").optional(),
fieldWithPath("fundersPanelDate").description("date on which the funders panel begins").optional(),
fieldWithPath("fundersPanelEndDate").description("date on which the funders panel ended").optional(),
fieldWithPath("assessorFeedbackDate").description("date on which applicants can expect to receive feedback from the assessments").optional(),
fieldWithPath("releaseFeedbackDate").description("date on which the feedback is intended to be released").optional(),
fieldWithPath("feedbackReleasedDate").description("date on which the feedback is released").optional(),
fieldWithPath("competitionStatus").description("the current status of the competition").optional(),
fieldWithPath("maxResearchRatio").description("maximum ratio of research participation").optional(),
fieldWithPath("academicGrantPercentage").description("grant claim percentage for the academics").optional(),
fieldWithPath("milestones").description("List of milestone ids").optional(),
fieldWithPath("competitionType").description("the competition type this competition belongs to").optional(),
fieldWithPath("competitionTypeName").description("the name of the competition type this competition belongs to").optional(),
fieldWithPath("executive").description("the user id of the competition executive").optional(),
fieldWithPath("executiveName").description("the name of the competition executive").optional(),
fieldWithPath("leadTechnologist").description("the user id of the competition leadTechnologist").optional(),
fieldWithPath("leadTechnologistName").description("the name of the competition leadTechnologist").optional(),
fieldWithPath("innovationSector").description("the Innovation sector this competition belongs to").optional(),
fieldWithPath("innovationSectorName").description("the Innovation sector name this competition belongs to").optional(),
fieldWithPath("innovationAreas").description("the Innovation areas this competition belongs to").optional(),
fieldWithPath("innovationAreaNames").description("the names of the Innovation areas this competition belongs to").optional(),
fieldWithPath("pafCode").description("the paf code entered during competition setup").optional(),
fieldWithPath("budgetCode").description("the budget code entered during competition setup").optional(),
fieldWithPath("code").description("the unique competition code entered during competition setup").optional(),
fieldWithPath("resubmission").description("indicates if the competition has the ability to do a resubmission").optional(),
fieldWithPath("multiStream").description("indicates if the competition has multiple streams").optional(),
fieldWithPath("streamName").description("the name of the stream"),
fieldWithPath("financeRowTypes").description("The finance types supported by this competition").optional(),
fieldWithPath("collaborationLevel").description("collaboration level (single, collaborative...)").optional(),
fieldWithPath("leadApplicantTypes").description("permitted organisation types of lead applicant (business, research...)").optional(),
fieldWithPath("researchCategories").description("the research categories entered during competition setup").optional(),
fieldWithPath("activityCode").description("the activity code entered during competition setup").optional(),
fieldWithPath("funders").description("the funders for this competition").optional(),
fieldWithPath("useResubmissionQuestion").description("should applications include the default resubmission question").optional(),
fieldWithPath("assessorCount").description("How many assessors are required to assess each application").optional(),
fieldWithPath("assessorPay").description("How much will assessors be paid per application they assess").optional(),
fieldWithPath("applicationFinanceType").description("The type of finances for the application").optional(),
fieldWithPath("setupComplete").description("Has the setup been completed and will move to open once past the open date").optional(),
fieldWithPath("completionStage").description("The stage at which the Competition is deemed closed").optional(),
fieldWithPath("nonIfs").description("Is this competition a non-ifs competition (not managed via IFS)").optional(),
fieldWithPath("nonIfsUrl").description("The URL to apply to the competition if it is a non-ifs competition").optional(),
fieldWithPath("hasAssessmentPanel").description("Indicates if the competition will have an assessment panel stage").optional(),
fieldWithPath("hasInterviewStage").description("Indicates if the competition will have an interview stage").optional(),
fieldWithPath("assessorFinanceView").description("Indicates if the competition will display an overview or a detailed view of the finances for the assessor").optional(),
fieldWithPath("termsAndConditions").description("The terms and conditions template that applies to this competition").optional(),
fieldWithPath("locationPerPartner").description("Indicates if the project location per partner is required during project setup for this competition").optional(),
fieldWithPath("minProjectDuration").description("The minimum amount of weeks that projects under this competition should last").optional(),
fieldWithPath("maxProjectDuration").description("The maximum amount of weeks that projects under this competition projects should last").optional(),
fieldWithPath("stateAid").description("Indicates if the competition has state aid eligibility").optional(),
fieldWithPath("grantClaimMaximums").description("List of grant claim maximums belonging to the competition").optional(),
fieldWithPath("competitionDocuments").description("List of documents required during the project setup phase").optional(),
fieldWithPath("nonFinanceType").description("Does the competition have finance questions").optional(),
fieldWithPath("includeJesForm").description("Does the competition include the Je-S form for academics").optional(),
fieldWithPath("includeProjectGrowthTable").description("Indicate if the competition has a project growth table").optional(),
fieldWithPath("includeYourOrganisationSection").description("Indicates if the competition has a your organisation section").optional(),
fieldWithPath("fundingType").description("The FundingType of the competition").optional(),
fieldWithPath("projectSetupStages").description("The stages an applicant most complete when application is in project setup").optional(),
fieldWithPath("competitionTerms").description("Any competition-specific terms").optional(),
fieldWithPath("createdBy").description("user who created this competition").optional(),
fieldWithPath("createdOn").description("when the competition was created").optional(),
fieldWithPath("modifiedBy").description("user who modified this competition").optional(),
fieldWithPath("modifiedOn").description("when the competition was modified").optional(),
fieldWithPath("hasAssessmentStage").description("Does the competition has assessors.").optional(),
fieldWithPath("covidType").description("The type of covid comp if any").optional(),
fieldWithPath("postAwardService").description("The service used post-award, if any").optional(),
};
public static final CompetitionResourceBuilder competitionResourceBuilder = newCompetitionResource()
.withId(1L)
.withName("competition name")
.withStartDate(ZonedDateTime.now())
.withEndDate(ZonedDateTime.now().plusDays(30))
.withRegistrationCloseDate(ZonedDateTime.now().plusDays(2))
.withAssessorAcceptsDate(ZonedDateTime.now().plusDays(35))
.withAssessorDeadlineDate(ZonedDateTime.now().plusDays(40))
.withFundersPanelDate(ZonedDateTime.now().plusDays(42))
.withFundersPanelEndDate(ZonedDateTime.now().plusDays(44))
.withAssessorFeedbackDate(ZonedDateTime.now().plusDays(56))
.withReleaseFeedbackDate(ZonedDateTime.now().plusDays(62))
.withMaxResearchRatio(20)
.withAcademicGrantClaimPercentage(100)
.withCompetitionCode("COMP-1")
.withCompetitionType(1L)
.withExecutive(1L)
.withLeadTechnologist(1L)
.withLeadTechnologistName("Innovation Lead")
.withInnovationAreas(singleton(1L))
.withInnovationAreaNames(singleton("Tech"))
.withInnovationSector(2L)
.withInnovationSectorName("IT")
.withLeadApplicantType(asList(1L, 2L))
.withPafCode("PAF-123")
.withBudgetCode("BUDGET-456")
.withActivityCode("Activity-Code")
.withNonIfs(true)
.withNonIfsUrl("https://google.co.uk")
.withMilestones(asList(1L, 2L, 3L))
.withTermsAndConditions(new GrantTermsAndConditionsResource("T&C", "terms-and-conditions-template", 1))
.withStateAid(true)
.withIncludeJesForm(true)
.withFundingType(FundingType.PROCUREMENT)
.withCompetitionTerms((FileEntryResource) null);
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package pl.edu.icm.coansys.importers.io.writers.hbase;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import static pl.edu.icm.coansys.importers.constants.HBaseConstant.*;
import java.io.IOException;
import java.util.Date;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper.Context;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.pig.backend.executionengine.ExecException;
import pl.edu.icm.coansys.importers.models.DocumentProtosWrapper.DocumentWrapper;
/**
*
* @author akawa
*/
public class HBaseToDocumentProtoSequenceFile implements Tool {
private static Logger logger = Logger.getLogger(HBaseToDocumentProtoSequenceFile.class);
private Configuration conf;
@Override
public void setConf(Configuration conf) {
this.conf = conf;
}
@Override
public Configuration getConf() {
return conf;
}
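/**
 * Counters reported by the mapper: DPROTO and MPROTO count the document and metadata records
 * written, CPROTO the media records, and DPROTO_SKIPPED the documents skipped for exceeding
 * the size limit.
 */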
public static enum Counters {
DPROTO, CPROTO, MPROTO, DPROTO_SKIPPED
}
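/**
 * Mapper that reads rows from the HBase table, rebuilds the DocumentWrapper protocol buffer
 * and writes it (together with the raw metadata bytes) to the named sequence-file outputs.
 */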
public static class RowToDocumentProtoMapper extends TableMapper<BytesWritable, BytesWritable> {
private BytesWritable key = new BytesWritable();
private BytesWritable documentProto = new BytesWritable();
private BytesWritable metadataProto = new BytesWritable();
private ResultToProtoBytesConverter converter = new ResultToProtoBytesConverter();
private DocumentWrapper.Builder dw = DocumentWrapper.newBuilder();
private MultipleOutputs mos = null;
private static final int MAX_SIZE = 1000000;
@Override
public void setup(Context context) {
mos = new MultipleOutputs(context);
}
@Override
public void map(ImmutableBytesWritable row, Result values, Context context) throws IOException, InterruptedException {
converter.set(values, dw);
if (logger.isDebugEnabled()) {
logger.debug("reading data from HBase");
}
byte[] rowId = converter.getRowId();
byte[] mproto = converter.getDocumentMetadata();
byte[] cproto = converter.getDocumentMedia();
if (logger.isDebugEnabled()) {
logger.debug("converting raw bytes to protocol buffers");
}
DocumentWrapper documentWrapper = converter.toDocumentWrapper(rowId, mproto, cproto);
byte[] dproto = documentWrapper.toByteArray();
key.set(rowId, 0, rowId.length);
if (logger.isDebugEnabled()) {
logger.debug("writing dproto to output");
}
if (dproto != null && dproto.length < MAX_SIZE) {
documentProto.set(dproto, 0, dproto.length);
mos.write("dproto", key, documentProto);
context.getCounter(Counters.DPROTO).increment(1);
} else {
System.out.println("dproto size = " + dproto.length);
context.getCounter(Counters.DPROTO_SKIPPED).increment(1);
}
if (logger.isDebugEnabled()) {
logger.debug("writing mproto to output");
}
if (mproto != null) {
metadataProto.set(mproto, 0, mproto.length);
mos.write(FAMILY_METADATA_QUALIFIER_PROTO, key, metadataProto);
context.getCounter(Counters.MPROTO).increment(1);
}
/*
if (cproto != null) {
mediaProto.set(cproto, 0, cproto.length);
mos.write(FAMILY_CONTENT_QUALIFIER_PROTO, key, mediaProto);
context.getCounter(Counters.CPROTO).increment(1);
}
*/
}
@Override
public void cleanup(Context context) throws IOException, InterruptedException {
mos.close();
}
}
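/**
 * Helper that extracts the row id, metadata and media byte arrays from an HBase Result
 * and assembles them into a DocumentWrapper protocol buffer.
 */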
public static class ResultToProtoBytesConverter {
Result result;
DocumentWrapper.Builder dw;
public ResultToProtoBytesConverter() {
}
public ResultToProtoBytesConverter(Result result, DocumentWrapper.Builder dw) {
this.result = result;
this.dw = dw;
}
public void set(Result result, DocumentWrapper.Builder dw) {
this.result = result;
this.dw = dw;
}
public byte[] getRowId() {
return result.getRow();
}
public DocumentWrapper toDocumentWrapper() throws ExecException, InvalidProtocolBufferException {
dw.setRowId(ByteString.copyFrom(getRowId()));
dw.setMproto(ByteString.copyFrom(getDocumentMetadata()));
dw.setCproto(ByteString.copyFrom(getDocumentMedia()));
DocumentWrapper build = dw.build();
return build;
}
public DocumentWrapper toDocumentWrapper(byte[] rowid, byte[] mproto, byte[] cproto) throws ExecException, InvalidProtocolBufferException {
dw.setRowId(ByteString.copyFrom(rowid));
dw.setMproto(ByteString.copyFrom(mproto));
dw.setCproto(ByteString.copyFrom(cproto));
DocumentWrapper build = dw.build();
return build;
}
public byte[] getDocumentMetadata() throws ExecException, InvalidProtocolBufferException {
return result.getValue(FAMILY_METADATA_BYTES, FAMILY_METADATA_QUALIFIER_PROTO_BYTES);
}
public byte[] getDocumentMedia() throws ExecException, InvalidProtocolBufferException {
return result.getValue(FAMILY_CONTENT_BYTES, FAMILY_CONTENT_QUALIFIER_PROTO_BYTES);
}
}
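/**
 * Configures and submits the map-only job that dumps the given HBase table into sequence files.
 * args[0] is the table name, args[1] the output directory.
 */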
@Override
public int run(String[] args) throws Exception {
String tableName = args[0];
String outputDir = args[1];
getOptimizedConfiguration(conf);
Job job = new Job(conf, HBaseToDocumentProtoSequenceFile.class.getSimpleName() + "_" + tableName + "_" + outputDir);
job.setJarByClass(HBaseToDocumentProtoSequenceFile.class);
Scan scan = new Scan();
scan.setCaching(100);
scan.setCacheBlocks(false);
TableMapReduceUtil.initTableMapperJob(tableName, scan, RowToDocumentProtoMapper.class,
BytesWritable.class, BytesWritable.class, job);
job.setNumReduceTasks(0);
job.setOutputKeyClass(BytesWritable.class);
job.setOutputValueClass(BytesWritable.class);
job.setOutputFormatClass(SequenceFileOutputFormat.class);
SequenceFileOutputFormat.setOutputPath(job, new Path(outputDir));
MultipleOutputs.addNamedOutput(job, FAMILY_METADATA_QUALIFIER_PROTO, SequenceFileOutputFormat.class, BytesWritable.class, BytesWritable.class);
MultipleOutputs.addNamedOutput(job, FAMILY_CONTENT_QUALIFIER_PROTO, SequenceFileOutputFormat.class, BytesWritable.class, BytesWritable.class);
MultipleOutputs.addNamedOutput(job, "dproto", SequenceFileOutputFormat.class, BytesWritable.class, BytesWritable.class);
boolean success = job.waitForCompletion(true);
if (!success) {
throw new IOException("Error with job!");
}
return 0;
}
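/**
 * Applies tuned child JVM memory and sort-buffer settings to the job configuration.
 */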
private void getOptimizedConfiguration(Configuration conf) {
conf.set("mapred.child.java.opts", "-Xmx2000m");
conf.set("io.sort.mb", "500");
conf.set("io.sort.spill.percent", "0.90");
conf.set("io.sort.record.percent", "0.15");
}
public static void main(String[] args) throws Exception {
logger.setLevel(Level.ALL);
if (args == null || args.length == 0) {
args = new String[2];
args[0] = "grotoap10";
args[1] = args[0] + "_dump_" + (new Date()).getTime();
}
Configuration conf = HBaseConfiguration.create();
String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
if (otherArgs.length < 2) {
usage("Wrong number of arguments: " + otherArgs.length);
System.exit(-1);
}
int result = ToolRunner.run(conf, new HBaseToDocumentProtoSequenceFile(), args);
System.exit(result);
}
private static void usage(String info) {
System.out.println(info);
System.out.println("Exemplary command: ");
String command = "hadoop jar target/importers-1.0-SNAPSHOT-jar-with-dependencies.jar"
+ " " + HBaseToDocumentProtoSequenceFile.class.getName() + " <table> <directory>";
System.out.println(command);
}
}
|
package org.broadinstitute.sting.gatk.datasources.simpleDataSources;
import org.testng.Assert;
import org.broadinstitute.sting.BaseTest;
import org.broadinstitute.sting.gatk.refdata.features.table.TableFeature;
import org.broadinstitute.sting.gatk.refdata.tracks.RMDTrack;
import org.broadinstitute.sting.gatk.refdata.tracks.builders.RMDTrackBuilder;
import org.broadinstitute.sting.gatk.refdata.utils.LocationAwareSeekableRODIterator;
import org.broadinstitute.sting.gatk.refdata.utils.RMDTriplet;
import org.broadinstitute.sting.gatk.refdata.utils.RMDTriplet.RMDStorageType;
import org.broadinstitute.sting.utils.GenomeLoc;
import org.broadinstitute.sting.utils.GenomeLocParser;
import org.broadinstitute.sting.utils.fasta.CachingIndexedFastaSequenceFile;
import static org.testng.Assert.assertTrue;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Collections;
import net.sf.picard.reference.IndexedFastaSequenceFile;
/**
* Test the contents and number of iterators in the pool.
*/
public class ReferenceOrderedDataPoolUnitTest extends BaseTest {
private RMDTriplet triplet = null;
private RMDTrackBuilder builder = null;
private IndexedFastaSequenceFile seq;
private GenomeLocParser genomeLocParser;
private GenomeLoc testSite1;
private GenomeLoc testSite2;
private GenomeLoc testSite3;
private GenomeLoc testInterval1; // an interval matching testSite1 -> testSite2 for queries
private GenomeLoc testInterval2; // an interval matching testSite2 -> testSite3 for queries
@BeforeClass
public void init() throws FileNotFoundException {
seq = new CachingIndexedFastaSequenceFile(new File(hg18Reference));
genomeLocParser = new GenomeLocParser(seq);
testSite1 = genomeLocParser.createGenomeLoc("chrM",10);
testSite2 = genomeLocParser.createGenomeLoc("chrM",20);
testSite3 = genomeLocParser.createGenomeLoc("chrM",30);
testInterval1 = genomeLocParser.createGenomeLoc("chrM",10,20);
testInterval2 = genomeLocParser.createGenomeLoc("chrM",20,30);
}
@BeforeMethod
public void setUp() {
String fileName = testDir + "TabularDataTest.dat";
// check to see if we have an index, if so, delete it
File indexFileName = new File(testDir + "TabularDataTest.dat.idx");
if (indexFileName.exists()) indexFileName.delete();
triplet = new RMDTriplet("tableTest","Table",fileName,RMDStorageType.FILE);
builder = new RMDTrackBuilder(Collections.singletonList(triplet),seq.getSequenceDictionary(),genomeLocParser,null);
}
@Test
public void testCreateSingleIterator() {
ResourcePool iteratorPool = new ReferenceOrderedDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser,false);
LocationAwareSeekableRODIterator iterator = (LocationAwareSeekableRODIterator)iteratorPool.iterator( new MappedStreamSegment(testSite1) );
Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
TableFeature datum = (TableFeature)iterator.next().get(0).getUnderlyingObject();
assertTrue(datum.getLocation().equals(testSite1));
assertTrue(datum.get("COL1").equals("A"));
assertTrue(datum.get("COL2").equals("B"));
assertTrue(datum.get("COL3").equals("C"));
iteratorPool.release(iterator);
Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
}
@Test
public void testCreateMultipleIterators() {
ReferenceOrderedQueryDataPool iteratorPool = new ReferenceOrderedQueryDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser);
LocationAwareSeekableRODIterator iterator1 = iteratorPool.iterator( new MappedStreamSegment(testInterval1) );
// Create a new iterator at position 2.
LocationAwareSeekableRODIterator iterator2 = iteratorPool.iterator( new MappedStreamSegment(testInterval2) );
Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
// Test out-of-order access: first iterator2, then iterator1.
// Ugh...first call to a region needs to be a seek.
TableFeature datum = (TableFeature)iterator2.seekForward(testSite2).get(0).getUnderlyingObject();
assertTrue(datum.getLocation().equals(testSite2));
assertTrue(datum.get("COL1").equals("C"));
assertTrue(datum.get("COL2").equals("D"));
assertTrue(datum.get("COL3").equals("E"));
datum = (TableFeature)iterator1.next().get(0).getUnderlyingObject();
assertTrue(datum.getLocation().equals(testSite1));
assertTrue(datum.get("COL1").equals("A"));
assertTrue(datum.get("COL2").equals("B"));
assertTrue(datum.get("COL3").equals("C"));
// Advance iterator2, and make sure both iterator's contents are still correct.
datum = (TableFeature)iterator2.next().get(0).getUnderlyingObject();
assertTrue(datum.getLocation().equals(testSite3));
assertTrue(datum.get("COL1").equals("F"));
assertTrue(datum.get("COL2").equals("G"));
assertTrue(datum.get("COL3").equals("H"));
datum = (TableFeature)iterator1.next().get(0).getUnderlyingObject();
assertTrue(datum.getLocation().equals(testSite2));
assertTrue(datum.get("COL1").equals("C"));
assertTrue(datum.get("COL2").equals("D"));
assertTrue(datum.get("COL3").equals("E"));
// Cleanup, and make sure the number of iterators dies appropriately.
iteratorPool.release(iterator1);
Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
iteratorPool.release(iterator2);
Assert.assertEquals(iteratorPool.numIterators(), 2, "Number of iterators in the pool is incorrect");
Assert.assertEquals(iteratorPool.numAvailableIterators(), 2, "Number of available iterators in the pool is incorrect");
}
@Test
public void testIteratorConservation() {
ReferenceOrderedDataPool iteratorPool = new ReferenceOrderedDataPool(triplet,builder,seq.getSequenceDictionary(),genomeLocParser,false);
LocationAwareSeekableRODIterator iterator = iteratorPool.iterator( new MappedStreamSegment(testSite1) );
Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
TableFeature datum = (TableFeature)iterator.next().get(0).getUnderlyingObject();
assertTrue(datum.getLocation().equals(testSite1));
assertTrue(datum.get("COL1").equals("A"));
assertTrue(datum.get("COL2").equals("B"));
assertTrue(datum.get("COL3").equals("C"));
iteratorPool.release(iterator);
// Create another iterator after the current iterator.
iterator = iteratorPool.iterator( new MappedStreamSegment(testSite3) );
// Make sure that the previously acquired iterator was reused.
Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
Assert.assertEquals(iteratorPool.numAvailableIterators(), 0, "Number of available iterators in the pool is incorrect");
datum = (TableFeature)iterator.seekForward(testSite3).get(0).getUnderlyingObject();
assertTrue(datum.getLocation().equals(testSite3));
assertTrue(datum.get("COL1").equals("F"));
assertTrue(datum.get("COL2").equals("G"));
assertTrue(datum.get("COL3").equals("H"));
iteratorPool.release(iterator);
Assert.assertEquals(iteratorPool.numIterators(), 1, "Number of iterators in the pool is incorrect");
Assert.assertEquals(iteratorPool.numAvailableIterators(), 1, "Number of available iterators in the pool is incorrect");
}
}
|
package com.konkerlabs.platform.registry.web.converters.utils;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import com.konkerlabs.platform.registry.business.model.User;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import lombok.Data;
@Component("userAvatarPath")
@Scope(BeanDefinition.SCOPE_PROTOTYPE)
@Data
public class UserAvatarPathUtil {
private static Config config = ConfigFactory.load().getConfig("cdn");
private String absolutePath;
@Autowired
public UserAvatarPathUtil(User user) {
if (config.getBoolean("enabled") && Optional.ofNullable(user.getAvatar()).isPresent()) {
absolutePath = config.getString("prefix") + "/" + config.getString("name") + "/";
absolutePath = absolutePath.concat(user.getAvatar());
} else {
absolutePath = "/resources/konker/images/default-avatar.png";
}
}
}
|
package org.venice.piazza.servicecontroller.messaging;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Map.Entry;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.errors.WakeupException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.ResourceAccessException;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.UriComponentsBuilder;
import org.venice.piazza.servicecontroller.data.mongodb.accessors.MongoAccessor;
import org.venice.piazza.servicecontroller.messaging.handlers.DeleteServiceHandler;
import org.venice.piazza.servicecontroller.messaging.handlers.DescribeServiceHandler;
import org.venice.piazza.servicecontroller.messaging.handlers.ExecuteServiceHandler;
import org.venice.piazza.servicecontroller.messaging.handlers.ListServiceHandler;
import org.venice.piazza.servicecontroller.messaging.handlers.RegisterServiceHandler;
import org.venice.piazza.servicecontroller.messaging.handlers.SearchServiceHandler;
import org.venice.piazza.servicecontroller.messaging.handlers.UpdateServiceHandler;
import org.venice.piazza.servicecontroller.util.CoreServiceProperties;
import org.venice.piazza.servicecontroller.util.CoreUUIDGen;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import messaging.job.JobMessageFactory;
import messaging.job.WorkerCallback;
import model.data.DataResource;
import model.data.DataType;
import model.data.type.BodyDataType;
import model.data.type.RasterDataType;
import model.data.type.TextDataType;
import model.data.type.URLParameterDataType;
import model.job.Job;
import model.job.PiazzaJobType;
import model.job.result.type.DataResult;
import model.job.result.type.ErrorResult;
import model.job.result.type.TextResult;
import model.job.type.DeleteServiceJob;
import model.job.type.DescribeServiceMetadataJob;
import model.job.type.ExecuteServiceJob;
import model.job.type.IngestJob;
import model.job.type.ListServicesJob;
import model.job.type.RegisterServiceJob;
import model.job.type.SearchServiceJob;
import model.job.type.UpdateServiceJob;
import model.request.PiazzaJobRequest;
import model.service.metadata.ExecuteServiceData;
import model.service.metadata.ParamDataItem;
import model.service.metadata.Service;
import model.status.StatusUpdate;
import util.PiazzaLogger;
import util.UUIDFactory;
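/**
 * Worker that consumes a single service job from Kafka, dispatches it to the matching handler
 * (register, execute, update, delete, describe, list or search) and publishes the resulting
 * status update back to the job topic.
 */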
public class ServiceMessageWorker implements Runnable {
private final static String TEXT_TYPE="text";
private final static String RASTER_TYPE="raster";
private final static Logger LOGGER = LoggerFactory.getLogger(ServiceMessageWorker.class);
private MongoAccessor accessor;
private PiazzaLogger coreLogger;
private CoreServiceProperties coreServiceProperties;
private Job job = null;
private ConsumerRecord<String, String> consumerRecord;
private Producer<String, String> producer;
private WorkerCallback callback;
private UUIDFactory uuidFactory;
private CoreUUIDGen uuidGenerator;
/**
 * Initializes the ServiceMessageWorker which handles the job request.
 * @param consumerRecord
 * @param producer
 * @param accessor
 * @param callback
 * @param coreServiceProperties
 * @param uuidFactory
 * @param logger
 * @param job
 */
public ServiceMessageWorker (ConsumerRecord<String, String> consumerRecord,
Producer<String, String> producer, MongoAccessor accessor, WorkerCallback callback,
CoreServiceProperties coreServiceProperties, UUIDFactory uuidFactory,
PiazzaLogger logger,Job job) {
this.job = job;
this.consumerRecord = consumerRecord;
this.producer = producer;
this.accessor = accessor;
this.callback = callback;
this.coreLogger = logger;
this.uuidFactory = uuidFactory;
}
/**
* Handles service job requests
*/
public void run() {
try {
String handleUpdate = StatusUpdate.STATUS_SUCCESS;
String handleTextUpdate = "";
ResponseEntity<List<String>> handleResult = null;
ObjectMapper mapper = new ObjectMapper();
try {
// if a jobType has been declared
if (job != null) {
PiazzaJobType jobType = job.getJobType();
LOGGER.debug("Job ID:" + job.getJobId());
if (jobType instanceof RegisterServiceJob) {
LOGGER.debug("RegisterServiceJob Detected");
// Handle Register Job
RegisterServiceHandler rsHandler = new RegisterServiceHandler(accessor, coreServiceProperties, coreLogger, uuidFactory);
handleResult = rsHandler.handle(jobType);
handleResult = checkResult(handleResult);
sendRegisterStatus(job, handleUpdate, handleResult);
} else if (jobType instanceof ExecuteServiceJob) {
LOGGER.debug("ExecuteServiceJob Detected");
// Get the ResourceMetadata
ExecuteServiceJob jobItem = (ExecuteServiceJob)jobType;
ExecuteServiceData esData = jobItem.data;
DataType dataType= esData.getDataOutput();
if (dataType.getType().equals("raster")) {
// Call special method to call and send
handleRasterType(jobItem);
}
else {
LOGGER.debug("ExecuteServiceJob Original Way");
ExecuteServiceHandler esHandler = new ExecuteServiceHandler(accessor, coreServiceProperties, coreLogger);
handleResult = esHandler.handle(jobType);
LOGGER.debug("Execution handled");
handleResult = checkResult(handleResult);
LOGGER.debug("Send Execute Status KAFKA");
sendExecuteStatus(job, handleUpdate, handleResult);
}
}
else if (jobType instanceof UpdateServiceJob) {
UpdateServiceHandler usHandler = new UpdateServiceHandler(accessor, coreServiceProperties, coreLogger, uuidFactory);
handleResult = usHandler.handle(jobType);
handleResult = checkResult(handleResult);
sendUpdateStatus(job, handleUpdate, handleResult);
}
else if (jobType instanceof DeleteServiceJob) {
DeleteServiceHandler dlHandler = new DeleteServiceHandler(accessor, coreServiceProperties, coreLogger, uuidFactory);
handleResult = dlHandler.handle(jobType);
handleResult = checkResult(handleResult);
sendDeleteStatus(job, handleUpdate, handleResult);
}
else if (jobType instanceof DescribeServiceMetadataJob) {
DescribeServiceHandler dsHandler = new DescribeServiceHandler(accessor, coreServiceProperties, coreLogger);
handleResult = dsHandler.handle(jobType);
handleResult = checkResult(handleResult);
sendDescribeStatus(job, handleUpdate, handleResult);
}
else if (jobType instanceof ListServicesJob) {
ListServiceHandler lsHandler = new ListServiceHandler(accessor, coreServiceProperties, coreLogger);
handleResult = lsHandler.handle(jobType);
handleResult = checkResult(handleResult);
sendListStatus(job, handleUpdate, handleResult);
}
else if (jobType instanceof SearchServiceJob) {
SearchServiceHandler ssHandler = new SearchServiceHandler(accessor, coreServiceProperties, coreLogger);
handleResult = ssHandler.handle(jobType);
handleResult = checkResult(handleResult);
sendSearchStatus(job, handleUpdate, handleResult);
}
}// if job not null
} catch (IOException ex) {
LOGGER.error(ex.getMessage());
handleUpdate = StatusUpdate.STATUS_ERROR;
handleTextUpdate = ex.getMessage();
}
catch (ResourceAccessException rex) {
LOGGER.error(rex.getMessage());
handleTextUpdate = rex.getMessage();
handleUpdate = StatusUpdate.STATUS_ERROR;
}
catch (HttpClientErrorException hex) {
LOGGER.error(hex.getMessage());
handleUpdate = StatusUpdate.STATUS_ERROR;
handleTextUpdate = hex.getMessage();
}
// if there was no result set then
// use the default error messages set.
if (handleResult == null) {
StatusUpdate su = new StatusUpdate();
su.setStatus(handleUpdate);
// Create an error result and update status
ErrorResult errorResult = new ErrorResult();
errorResult.setMessage(handleTextUpdate);
su.setResult(errorResult);
ProducerRecord<String,String> prodRecord =
new ProducerRecord<String,String> (JobMessageFactory.UPDATE_JOB_TOPIC_NAME,job.getJobId(),
mapper.writeValueAsString(su));
producer.send(prodRecord);
}
// If the status is not ok and the job is not equal to null
// then send an update to the job manager that there was some failure
else {
boolean eResult = (handleResult.getStatusCode() != HttpStatus.OK) && (job != null);
if (eResult) {
handleUpdate = StatusUpdate.STATUS_FAIL;
handleResult = checkResult(handleResult);
String serviceControlString = mapper.writeValueAsString(handleResult);
StatusUpdate su = new StatusUpdate();
su.setStatus(handleUpdate);
// Create an error result and update status
ErrorResult errorResult = new ErrorResult();
errorResult.setMessage(serviceControlString);
su.setResult(errorResult);
ProducerRecord<String,String> prodRecord =
new ProducerRecord<String,String> (JobMessageFactory.UPDATE_JOB_TOPIC_NAME,job.getJobId(),
mapper.writeValueAsString(su));
producer.send(prodRecord);
}
}
} catch (WakeupException ex) {
LOGGER.error(ex.getMessage());
} catch (JsonProcessingException ex) {
LOGGER.error(ex.getMessage());
}
}
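/**
 * Sends the outcome of a list-services request as a status update on the Kafka job topic.
 */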
private void sendListStatus(Job job, String status, ResponseEntity<List<String>> handleResult) throws JsonProcessingException {
if (handleResult != null) {
// Create a text result and update status
StatusUpdate su = new StatusUpdate();
su.setStatus(StatusUpdate.STATUS_SUCCESS);
List <String>stringList = handleResult.getBody();
TextResult textResult = new TextResult();
textResult.setText(stringList.get(0));
su.setResult(textResult);
if (handleResult.getStatusCode() == HttpStatus.OK) {
LOGGER.debug("THe STATUS is " + su.getStatus());
LOGGER.debug("THe RESULT is " + su.getResult());
ProducerRecord<String,String> prodRecord = JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su);
producer.send(prodRecord);
}
else {
su = new StatusUpdate(StatusUpdate.STATUS_ERROR);
su.setResult(new ErrorResult(stringList.get(0), handleResult.getStatusCode().toString()));
producer.send(JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su));
}
}
}
/**
* Sends an update for registering a job
*
*/
private void sendRegisterStatus(Job job, String status, ResponseEntity<List<String>> handleResult) throws JsonProcessingException {
if (handleResult != null) {
// Create a text result and update status
StatusUpdate su = new StatusUpdate();
su.setStatus(StatusUpdate.STATUS_SUCCESS);
List <String>stringList = handleResult.getBody();
TextResult textResult = new TextResult();
textResult.setText(stringList.get(0));
su.setResult(textResult);
if (handleResult.getStatusCode() == HttpStatus.OK) {
LOGGER.debug("THe STATUS is " + su.getStatus());
LOGGER.debug("THe RESULT is " + su.getResult());
ProducerRecord<String,String> prodRecord = JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su);
producer.send(prodRecord);
}
else {
su = new StatusUpdate(StatusUpdate.STATUS_ERROR);
su.setResult(new ErrorResult(stringList.get(0), handleResult.getStatusCode().toString()));
producer.send(JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su));
}
}
}
/**
* Sends the list of services to the job
*/
private void sendSearchStatus(Job job, String status, ResponseEntity<List<String>> handleResult) throws JsonProcessingException {
if (handleResult != null) {
// Create a text result and update status
StatusUpdate su = new StatusUpdate();
su.setStatus(StatusUpdate.STATUS_SUCCESS);
List <String>stringList = handleResult.getBody();
TextResult textResult = new TextResult();
textResult.setText(stringList.get(0));
su.setResult(textResult);
if (handleResult.getStatusCode() == HttpStatus.OK) {
LOGGER.debug("THe STATUS is " + su.getStatus());
LOGGER.debug("THe RESULT is " + su.getResult());
ProducerRecord<String,String> prodRecord = JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su);
producer.send(prodRecord);
}
else {
su = new StatusUpdate(StatusUpdate.STATUS_ERROR);
su.setResult(new ErrorResult(stringList.get(0), "No Results returned from the search. HTTP Status:" + handleResult.getStatusCode().toString()));
producer.send(JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su));
}
}
}
/**
* Sends an update for registering a job
* Message is sent on Kafka Queue
*
*/
private void sendUpdateStatus(Job job, String status, ResponseEntity<List<String>> handleResult) throws JsonProcessingException {
ObjectMapper mapper = new ObjectMapper();
String serviceControlString = mapper.writeValueAsString(handleResult.getBody());
StatusUpdate su = new StatusUpdate();
su.setStatus(serviceControlString);
ProducerRecord<String,String> prodRecord =
new ProducerRecord<String,String> (JobMessageFactory.UPDATE_JOB_TOPIC_NAME,job.getJobId(),
mapper.writeValueAsString(su));
producer.send(prodRecord);
}
/**
* Sends an update for deleting the resource
* Resource is not deleted but marked as unavailable
* Message is sent on Kafka Queue
*
*/
private void sendDeleteStatus(Job job, String status, ResponseEntity<List<String>> handleResult) throws JsonProcessingException {
if (handleResult != null) {
// Create a text result and update status
StatusUpdate su = new StatusUpdate();
su.setStatus(StatusUpdate.STATUS_SUCCESS);
List <String>stringList = handleResult.getBody();
TextResult textResult = new TextResult();
// Get the resource ID and set it as the result
textResult.setText(stringList.get(1));
su.setResult(textResult);
if (handleResult.getStatusCode() == HttpStatus.OK) {
LOGGER.debug("THe STATUS is " + su.getStatus());
LOGGER.debug("THe RESULT is " + su.getResult());
ProducerRecord<String,String> prodRecord = JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su);
producer.send(prodRecord);
}
else {
su = new StatusUpdate(StatusUpdate.STATUS_ERROR);
su.setResult(new ErrorResult(stringList.get(0), "Resource cold not be deleted. HTTP Status:" + handleResult.getStatusCode().toString()));
producer.send(JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su));
}
}
}
/**
* Sends an update for describing the resource
* Message is sent on Kafka Queue
*
*/
private void sendDescribeStatus(Job job, String status, ResponseEntity<List<String>> handleResult) throws JsonProcessingException {
if (handleResult != null) {
// Create a text result and update status
StatusUpdate su = new StatusUpdate();
su.setStatus(StatusUpdate.STATUS_SUCCESS);
List <String>stringList = handleResult.getBody();
TextResult textResult = new TextResult();
textResult.setText(stringList.get(0));
su.setResult(textResult);
if (handleResult.getStatusCode() == HttpStatus.OK) {
LOGGER.debug("THe STATUS is " + su.getStatus());
LOGGER.debug("THe RESULT is " + su.getResult());
ProducerRecord<String,String> prodRecord = JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su);
producer.send(prodRecord);
}
else {
su = new StatusUpdate(StatusUpdate.STATUS_ERROR);
su.setResult(new ErrorResult(stringList.get(0), "Resource cold not be deleted. HTTP Status:" + handleResult.getStatusCode().toString()));
producer.send(JobMessageFactory.getUpdateStatusMessage(job.getJobId(), su));
}
}
}
/**
* Send an execute job status and the resource that was used
* Message is sent on Kafka Queue
* @param job
* @param status
* @param handleResult
* @throws JsonProcessingException
*/
private void sendExecuteStatus(Job job, String status, ResponseEntity<List<String>> handleResult) throws JsonProcessingException, IOException {
ObjectMapper mapper = new ObjectMapper();
String serviceControlString = mapper.writeValueAsString(handleResult.getBody());
// Now produce a new record
PiazzaJobRequest pjr = new PiazzaJobRequest();
// TODO read from properties file
pjr.apiKey = "pz-sc-ingest-test";
IngestJob ingestJob = new IngestJob();
DataResource data = new DataResource();
//TODO MML UUIDGen
data.dataId = uuidFactory.getUUID();
TextDataType tr = new TextDataType();
tr.content = serviceControlString;
data.dataType = tr;
ingestJob.data=data;
ingestJob.host = true;
pjr.jobType = ingestJob;
// TODO Generate 123-456 with UUIDGen
ProducerRecord<String,String> newProdRecord =
JobMessageFactory.getRequestJobMessage(pjr, uuidFactory.getUUID());
producer.send(newProdRecord);
StatusUpdate statusUpdate = new StatusUpdate(StatusUpdate.STATUS_SUCCESS);
// Create a text result and update status
DataResult textResult = new DataResult(data.dataId);
statusUpdate.setResult(textResult);
ProducerRecord<String,String> prodRecord = JobMessageFactory.getUpdateStatusMessage(job.getJobId(), statusUpdate);
producer.send(prodRecord);
}
/**
* This method is for demonstrating ingest of raster data
* This will be refactored once the API changes have been communicated to
* other team members
*/
public void handleRasterType(ExecuteServiceJob executeJob) {
RestTemplate restTemplate = new RestTemplate();
ExecuteServiceData data = executeJob.data;
// Get the id from the data
String serviceId = data.getServiceId();
Service sMetadata = accessor.getServiceById(serviceId);
// Default request mimeType application/json
String requestMimeType = "application/json";
MultiValueMap<String, String> map = new LinkedMultiValueMap<String, String>();
UriComponentsBuilder builder = UriComponentsBuilder.fromHttpUrl(sMetadata.getResourceMetadata().url);
Set<String> parameterNames = new HashSet<String>();
if (sMetadata.getInputs() != null && sMetadata.getInputs().size() > 0) {
for (ParamDataItem pdataItem : sMetadata.getInputs()) {
if (pdataItem.getDataType() instanceof URLParameterDataType) {
parameterNames.add(pdataItem.getName());
}
}
}
Map<String,DataType> postObjects = new HashMap<String,DataType>();
Iterator<Entry<String,DataType>> it = data.getDataInputs().entrySet().iterator();
String postString = "";
while (it.hasNext()) {
Entry<String,DataType> entry = it.next();
String inputName = entry.getKey();
if (parameterNames.contains(inputName)) {
if (entry.getValue() instanceof TextDataType) {
String paramValue = ((TextDataType)entry.getValue()).getContent();
if (inputName.length() == 0) {
builder = UriComponentsBuilder.fromHttpUrl(sMetadata.getResourceMetadata().url + "?" + paramValue);
}
else {
builder.queryParam(inputName,paramValue);
}
}
else {
LOGGER.error("URL parameter value has to be specified in TextDataType" );
return;
}
}
else if (entry.getValue() instanceof BodyDataType){
BodyDataType bdt = (BodyDataType)entry.getValue();
postString = bdt.getContent();
requestMimeType = bdt.getMimeType();
}
//Default behavior for other inputs, put them in list of objects
// which are transformed into JSON consistent with default requestMimeType
else {
postObjects.put(inputName, entry.getValue());
}
}
if (postString.length() > 0 && postObjects.size() > 0) {
LOGGER.error("String Input not consistent with other Inputs");
return;
}
else if (postObjects.size() > 0){
ObjectMapper mapper = new ObjectMapper();
try {
postString = mapper.writeValueAsString(postObjects);
} catch (JsonProcessingException e) {
LOGGER.error("Could not serialize the POST inputs to JSON", e);
}
}
URI url = URI.create(builder.toUriString());
HttpHeaders headers = new HttpHeaders();
// Set the mimeType of the request
MediaType mediaType = createMediaType(requestMimeType);
headers.setContentType(mediaType);
// Set the mimeType of the request
//headers.add("Content-type", sMetadata.getOutputs().get(0).getDataType().getMimeType());
HttpEntity<String> requestEntity = null;
if (postString.length() > 0) {
LOGGER.debug("The postString is " + postString);
requestEntity = this.buildHttpEntity(sMetadata, headers, postString);
}
else {
requestEntity = new HttpEntity(headers);
}
try {
LOGGER.debug("About to call special service");
//ResponseEntity<DataResource> response = restTemplate.exchange(url, HttpMethod.POST, requestEntity, DataResource.class);
LOGGER.debug("URL calling" + url);
ResponseEntity<DataResource> response = restTemplate.postForEntity(url, requestEntity, DataResource.class);
DataResource dataResource = response.getBody();
dataResource.dataId = uuidFactory.getUUID();
PiazzaJobRequest pjr = new PiazzaJobRequest();
pjr.apiKey = "pz-sc-ingest-raster-test";
IngestJob ingestJob = new IngestJob();
ingestJob.data=dataResource;
ingestJob.host = true;
pjr.jobType = ingestJob;
ProducerRecord<String,String> newProdRecord =
JobMessageFactory.getRequestJobMessage(pjr, uuidFactory.getUUID());
producer.send(newProdRecord);
LOGGER.debug("newProdRecord sent" + newProdRecord.toString());
StatusUpdate statusUpdate = new StatusUpdate(StatusUpdate.STATUS_SUCCESS);
// Create a text result and update status
DataResult textResult = new DataResult(dataResource.dataId);
statusUpdate.setResult(textResult);
ProducerRecord<String,String> prodRecord = JobMessageFactory.getUpdateStatusMessage(job.getJobId(), statusUpdate);
producer.send(prodRecord);
LOGGER.debug("prodRecord sent" + prodRecord.toString());
} catch (JsonProcessingException jpe) {
LOGGER.error(jpe.getMessage(), jpe);
} catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
}
}
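/**
 * Builds a Spring MediaType from a mime type string, splitting it into type and subtype
 * when a slash is present; otherwise the whole string is treated as the type.
 */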
private MediaType createMediaType(String mimeType) {
MediaType mediaType;
String type, subtype;
StringBuffer sb = new StringBuffer(mimeType);
int index = sb.indexOf("/");
// If a slash was found then there is a type and subtype
if (index != -1) {
type = sb.substring(0, index);
subtype = sb.substring(index+1, mimeType.length());
mediaType = new MediaType(type, subtype);
LOGGER.debug("The type is="+type);
LOGGER.debug("The subtype="+subtype);
}
else {
// Assume there is just a type for the mime, no subtype
mediaType = new MediaType(mimeType);
}
return mediaType;
}
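/**
 * Wraps the request body and headers into an HttpEntity for the outgoing service call.
 */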
public HttpEntity<String> buildHttpEntity(Service sMetadata, MultiValueMap<String, String> headers, String data) {
HttpEntity<String> requestEntity = new HttpEntity<String>(data,headers);
return requestEntity;
}
/**
* Check to see if there is a valid handleResult that was created. If not,
* then create a message with No Content
* @param handleResult
* @return handleResult - Created if the result is not valid
*/
private ResponseEntity<List<String>> checkResult(ResponseEntity<List<String>> handleResult) {
if (handleResult == null) {
handleResult = new ResponseEntity<List<String>>(new ArrayList<String>(),HttpStatus.NO_CONTENT);
}
return handleResult;
}
}
|
package org.codehaus.modello.plugin.xpp3;
import java.io.IOException;
import java.util.Iterator;
import java.util.Properties;
import org.codehaus.modello.ModelloException;
import org.codehaus.modello.model.Model;
import org.codehaus.modello.model.ModelAssociation;
import org.codehaus.modello.model.ModelClass;
import org.codehaus.modello.model.ModelDefault;
import org.codehaus.modello.model.ModelField;
import org.codehaus.modello.plugin.java.JavaClassMetadata;
import org.codehaus.modello.plugin.java.javasource.JClass;
import org.codehaus.modello.plugin.java.javasource.JField;
import org.codehaus.modello.plugin.java.javasource.JMethod;
import org.codehaus.modello.plugin.java.javasource.JParameter;
import org.codehaus.modello.plugin.java.javasource.JSourceCode;
import org.codehaus.modello.plugin.java.javasource.JSourceWriter;
import org.codehaus.modello.plugin.java.javasource.JType;
import org.codehaus.modello.plugins.xml.XmlAssociationMetadata;
import org.codehaus.modello.plugins.xml.XmlClassMetadata;
import org.codehaus.modello.plugins.xml.XmlFieldMetadata;
import org.codehaus.plexus.util.StringUtils;
/**
* @author <a href="mailto:jason@modello.org">Jason van Zyl</a>
* @author <a href="mailto:evenisse@codehaus.org">Emmanuel Venisse</a>
* @version $Id$
*/
public class Xpp3ReaderGenerator
extends AbstractXpp3Generator
{
public void generate( Model model, Properties parameters )
throws ModelloException
{
initialize( model, parameters );
try
{
generateXpp3Reader();
}
catch ( IOException ex )
{
throw new ModelloException( "Exception while generating XPP3 Reader.", ex );
}
}
private void generateXpp3Reader()
throws ModelloException, IOException
{
Model objectModel = getModel();
String packageName = objectModel.getDefaultPackageName( isPackageWithVersion(), getGeneratedVersion() )
+ ".io.xpp3";
String unmarshallerName = getFileName( "Xpp3Reader" );
JSourceWriter sourceWriter = newJSourceWriter( packageName, unmarshallerName );
JClass jClass = new JClass( packageName + '.' + unmarshallerName );
jClass.addImport( "org.codehaus.plexus.util.ReaderFactory" );
jClass.addImport( "org.codehaus.plexus.util.xml.pull.MXParser" );
jClass.addImport( "org.codehaus.plexus.util.xml.pull.XmlPullParser" );
jClass.addImport( "org.codehaus.plexus.util.xml.pull.XmlPullParserException" );
jClass.addImport( "java.io.InputStream" );
jClass.addImport( "java.io.IOException" );
jClass.addImport( "java.io.Reader" );
jClass.addImport( "java.text.DateFormat" );
jClass.addImport( "java.util.Locale" );
addModelImports( jClass, null );
// Write option setters
// The Field
JField addDefaultEntities = new JField( JType.BOOLEAN, "addDefaultEntities" );
addDefaultEntities.setComment(
"If set the parser will be loaded with all single characters from the XHTML specification.\n" +
"The entities used:\n" + "<ul>\n" + "<li>http://www.w3.org/TR/xhtml1/DTD/xhtml-lat1.ent\n" +
"<li>http://www.w3.org/TR/xhtml1/DTD/xhtml-special.ent\n" +
"<li>http://www.w3.org/TR/xhtml1/DTD/xhtml-symbol.ent\n" + "</ul>\n" );
addDefaultEntities.setInitString( "true" );
jClass.addField( addDefaultEntities );
// The setter
JMethod addDefaultEntitiesSetter = new JMethod( "setAddDefaultEntities" );
addDefaultEntitiesSetter.addParameter( new JParameter( JType.BOOLEAN, "addDefaultEntities" ) );
addDefaultEntitiesSetter.setSourceCode( "this.addDefaultEntities = addDefaultEntities;" );
addDefaultEntitiesSetter.setComment( "Sets the state of the \"add default entities\" flag." );
jClass.addMethod( addDefaultEntitiesSetter );
// The getter
JMethod addDefaultEntitiesGetter = new JMethod( "getAddDefaultEntities", JType.BOOLEAN, null );
addDefaultEntitiesGetter.setComment( "Returns the state of the \"add default entities\" flag." );
addDefaultEntitiesGetter.setSourceCode( "return addDefaultEntities;" );
jClass.addMethod( addDefaultEntitiesGetter );
// Write the parse(Reader) method which will do the unmarshalling.
ModelClass root = objectModel.getClass( objectModel.getRoot( getGeneratedVersion() ), getGeneratedVersion() );
JMethod unmarshall = new JMethod( "read", new JClass( root.getName() ), null );
unmarshall.setComment( "@see ReaderFactory#newXmlReader" );
unmarshall.addParameter( new JParameter( new JClass( "Reader" ), "reader" ) );
unmarshall.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
unmarshall.addException( new JClass( "IOException" ) );
unmarshall.addException( new JClass( "XmlPullParserException" ) );
JSourceCode sc = unmarshall.getSourceCode();
sc.add( "XmlPullParser parser = new MXParser();" );
sc.add( "" );
sc.add( "parser.setInput( reader );" );
sc.add( "" );
writeParserInitialization( sc );
sc.add( "" );
sc.add( "parser.next();" );
sc.add( "return parse" + root.getName() + "( \"" + getTagName( root ) + "\", parser, strict );" );
jClass.addMethod( unmarshall );
unmarshall = new JMethod( "read", new JClass( root.getName() ), null );
unmarshall.setComment( "@see ReaderFactory#newXmlReader" );
unmarshall.addParameter( new JParameter( new JClass( "Reader" ), "reader" ) );
unmarshall.addException( new JClass( "IOException" ) );
unmarshall.addException( new JClass( "XmlPullParserException" ) );
sc = unmarshall.getSourceCode();
sc.add( "return read( reader, true );" );
jClass.addMethod( unmarshall );
// Write the parse(InputStream) method which will do the unmarshalling.
unmarshall = new JMethod( "read", new JClass( root.getName() ), null );
unmarshall.addParameter( new JParameter( new JClass( "InputStream" ), "in" ) );
unmarshall.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
unmarshall.addException( new JClass( "IOException" ) );
unmarshall.addException( new JClass( "XmlPullParserException" ) );
sc = unmarshall.getSourceCode();
sc.add( "Reader reader = ReaderFactory.newXmlReader( in );" );
sc.add( "" );
sc.add( "return read( reader, strict );" );
jClass.addMethod( unmarshall );
unmarshall = new JMethod( "read", new JClass( root.getName() ), null );
unmarshall.addParameter( new JParameter( new JClass( "InputStream" ), "in" ) );
unmarshall.addException( new JClass( "IOException" ) );
unmarshall.addException( new JClass( "XmlPullParserException" ) );
sc = unmarshall.getSourceCode();
sc.add( "Reader reader = ReaderFactory.newXmlReader( in );" );
sc.add( "" );
sc.add( "return read( reader );" );
jClass.addMethod( unmarshall );
// Write the class parsers
writeAllClassesParser( objectModel, jClass );
// Write helpers
writeHelpers( jClass );
jClass.print( sourceWriter );
sourceWriter.close();
}
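/**
 * Returns the XML tag name configured for the root class, falling back to the
 * uncapitalised class name when no tag name is set.
 */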
private String getTagName( ModelClass root )
{
XmlClassMetadata metadata = (XmlClassMetadata) root.getMetadata( XmlClassMetadata.ID );
String tagName = metadata.getTagName();
if ( tagName != null )
{
return tagName;
}
return uncapitalise( root.getName() );
}
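/**
 * Generates a parse method for every class in the model, flagging the root class so that
 * its parser also drives the surrounding document loop.
 */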
private void writeAllClassesParser( Model objectModel, JClass jClass )
{
ModelClass root = objectModel.getClass( objectModel.getRoot( getGeneratedVersion() ), getGeneratedVersion() );
for ( Iterator i = objectModel.getClasses( getGeneratedVersion() ).iterator(); i.hasNext(); )
{
ModelClass clazz = (ModelClass) i.next();
writeClassParser( clazz, jClass, root.getName().equals( clazz.getName() ) );
}
}
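/**
 * Generates the private parse method for one model class: attribute fields are read first,
 * then child elements are matched in a tag loop, with strict mode rejecting unrecognised tags.
 * Abstract classes are skipped.
 */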
private void writeClassParser( ModelClass modelClass, JClass jClass, boolean rootElement )
{
JavaClassMetadata md = (JavaClassMetadata) modelClass.getMetadata(JavaClassMetadata.class.getName());
// Skip abstract classes, no way to parse them out into objects
if (md.isAbstract()) {
return;
}
String className = modelClass.getName();
String capClassName = capitalise( className );
String uncapClassName = uncapitalise( className );
JMethod unmarshall = new JMethod( "parse" + capClassName, new JClass( className ), null );
unmarshall.getModifiers().makePrivate();
unmarshall.addParameter( new JParameter( new JClass( "String" ), "tagName" ) );
unmarshall.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
unmarshall.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
unmarshall.addException( new JClass( "IOException" ) );
unmarshall.addException( new JClass( "XmlPullParserException" ) );
JSourceCode sc = unmarshall.getSourceCode();
sc.add( className + " " + uncapClassName + " = new " + className + "();" );
ModelField contentField = null;
for ( Iterator i = modelClass.getAllFields( getGeneratedVersion(), true ).iterator(); i.hasNext(); )
{
ModelField field = (ModelField) i.next();
XmlFieldMetadata fieldMetadata = (XmlFieldMetadata) field.getMetadata( XmlFieldMetadata.ID );
if ( fieldMetadata.isAttribute() )
{
writePrimitiveField( field, field.getType(), uncapClassName, "set" + capitalise( field.getName() ), sc,
jClass );
}
// TODO check if we have already one with this type and throws Exception
if ( "Content".equals( field.getType() ) )
{
contentField = field;
}
}
if (contentField != null)
{
sc.add( "parser.next();" );
sc.add( uncapClassName + ".set" + capitalise( contentField.getName() )
+ "( getTrimmedValue( parser.getText() ) ); " );
}
sc.add( "java.util.Set parsed = new java.util.HashSet();" );
if ( rootElement )
{
sc.add( "int eventType = parser.getEventType();" );
sc.add( "boolean foundRoot = false;" );
sc.add( "while ( eventType != XmlPullParser.END_DOCUMENT )" );
sc.add( "{" );
sc.indent();
sc.add( "if ( eventType == XmlPullParser.START_TAG )" );
}
else
{
sc.add( "while ( parser.nextTag() == XmlPullParser.START_TAG )" );
}
sc.add( "{" );
sc.indent();
String statement = "if";
if ( rootElement )
{
sc.add( "if ( parser.getName().equals( tagName ) )" );
sc.add( "{" );
sc.addIndented( "foundRoot = true;" );
sc.add( "}" );
statement = "else if";
}
//Write other fields
for ( Iterator i = modelClass.getAllFields( getGeneratedVersion(), true ).iterator(); i.hasNext(); )
{
ModelField field = (ModelField) i.next();
XmlFieldMetadata fieldMetadata = (XmlFieldMetadata) field.getMetadata( XmlFieldMetadata.ID );
if ( !fieldMetadata.isAttribute() )
{
processField( fieldMetadata, field, statement, sc, uncapClassName, modelClass, jClass );
statement = "else if";
}
}
if ( !rootElement )
{
/*
if ( modelClass.getFields( getGeneratedVersion() ).size() > 0 )
{
sc.add( "else" );
sc.add( "{" );
sc.addIndented( "parser.nextText();" );
sc.add( "}" );
}
*/
if ( statement.startsWith( "else" ) )
{
sc.add( "else" );
sc.add( "{" );
sc.indent();
}
sc.add( "if ( strict )" );
sc.add( "{" );
sc.addIndented(
"throw new XmlPullParserException( \"Unrecognised tag: '\" + parser.getName() + \"'\", parser, null );" );
sc.add( "}" );
sc.add( "else" );
sc.add( "{" );
sc.indent();
sc.add( "// swallow up to end tag since this is not valid" );
sc.add( "while ( parser.next() != XmlPullParser.END_TAG ) {}" );
sc.unindent();
sc.add( "}" );
if ( statement.startsWith( "else" ) )
{
sc.unindent();
sc.add( "}" );
}
}
else
{
sc.add( "else if ( strict )" );
sc.add( "{" );
sc.addIndented(
"throw new XmlPullParserException( \"Unrecognised tag: '\" + parser.getName() + \"'\", parser, null );" );
sc.add( "}" );
sc.unindent();
sc.add( "}" );
sc.add( "eventType = parser.next();" );
}
sc.unindent();
sc.add( "}" );
sc.add( "return " + uncapClassName + ";" );
jClass.addMethod( unmarshall );
}
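/**
 * Emits the parsing code for one non-attribute field, covering single associations,
 * wrapped and flat collections, map-style associations and plain fields.
 */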
private void processField( XmlFieldMetadata fieldMetadata, ModelField field, String statement, JSourceCode sc,
String uncapClassName, ModelClass modelClass, JClass jClass )
{
String tagName = fieldMetadata.getTagName();
if ( tagName == null )
{
tagName = field.getName();
}
String singularTagName = fieldMetadata.getAssociationTagName();
if ( singularTagName == null )
{
singularTagName = singular( tagName );
}
boolean wrappedList = XmlFieldMetadata.LIST_STYLE_WRAPPED.equals( fieldMetadata.getListStyle() );
String capFieldName = capitalise( field.getName() );
String singularName = singular( field.getName() );
String alias;
if ( StringUtils.isEmpty( field.getAlias() ) )
{
alias = "null";
}
else
{
alias = "\"" + field.getAlias() + "\"";
}
String tagComparison =
statement + " ( checkFieldWithDuplicate( parser, \"" + tagName + "\", " + alias + ", parsed ) )";
if ( field instanceof ModelAssociation )
{
ModelAssociation association = (ModelAssociation) field;
String associationName = association.getName();
if ( ModelAssociation.ONE_MULTIPLICITY.equals( association.getMultiplicity() ) )
{
sc.add( tagComparison );
sc.add( "{" );
sc.addIndented( uncapClassName + ".set" + capFieldName + "( parse" + association.getTo() + "( \""
+ tagName + "\", parser, strict ) );" );
sc.add( "}" );
}
else
{
//MANY_MULTIPLICITY
String type = association.getType();
if ( ModelDefault.LIST.equals( type ) || ModelDefault.SET.equals( type ) )
{
if ( wrappedList )
{
sc.add( tagComparison );
sc.add( "{" );
sc.indent();
sc.add( type + " " + associationName + " = " + association.getDefaultValue() + ";" );
sc.add( uncapClassName + ".set" + capFieldName + "( " + associationName + " );" );
sc.add( "while ( parser.nextTag() == XmlPullParser.START_TAG )" );
sc.add( "{" );
sc.indent();
sc.add( "if ( parser.getName().equals( \"" + singularTagName + "\" ) )" );
sc.add( "{" );
sc.indent();
}
else
{
sc.add( statement + " ( parser.getName().equals( \"" + singularTagName + "\" ) )" );
sc.add( "{" );
sc.indent();
sc.add( type + " " + associationName + " = " + uncapClassName + ".get" + capFieldName + "();" );
sc.add( "if ( " + associationName + " == null )" );
sc.add( "{" );
sc.indent();
sc.add( associationName + " = " + association.getDefaultValue() + ";" );
sc.add( uncapClassName + ".set" + capFieldName + "( " + associationName + " );" );
sc.unindent();
sc.add( "}" );
}
if ( isClassInModel( association.getTo(), modelClass.getModel() ) )
{
sc.add( associationName + ".add( parse" + association.getTo() + "( \"" + singularTagName +
"\", parser, strict ) );" );
}
else
{
writePrimitiveField( association, association.getTo(), associationName, "add", sc, jClass );
}
if ( wrappedList )
{
sc.unindent();
sc.add( "}" );
sc.add( "else if ( strict )" );
sc.add( "{" );
sc.addIndented( "throw new XmlPullParserException( \"Unrecognised association: '\" + "
+ "parser.getName() + \"'\", parser, null );" );
sc.add( "}" );
sc.add( "else" );
sc.add( "{" );
sc.indent();
sc.add( "// swallow up to end tag since this is not valid" );
sc.add( "while ( parser.next() != XmlPullParser.END_TAG ) {}" );
sc.unindent();
sc.add( "}" );
sc.unindent();
sc.add( "}" );
sc.unindent();
sc.add( "}" );
}
else
{
sc.unindent();
sc.add( "}" );
}
}
else
{
//Map or Properties
sc.add( tagComparison );
sc.add( "{" );
sc.indent();
XmlAssociationMetadata xmlAssociationMetadata =
(XmlAssociationMetadata) association.getAssociationMetadata( XmlAssociationMetadata.ID );
if ( XmlAssociationMetadata.EXPLODE_MODE.equals( xmlAssociationMetadata.getMapStyle() ) )
{
sc.add( "while ( parser.nextTag() == XmlPullParser.START_TAG )" );
sc.add( "{" );
sc.indent();
sc.add( "if ( parser.getName().equals( \"" + singularTagName + "\" ) )" );
sc.add( "{" );
sc.indent();
sc.add( "String key = null;" );
sc.add( "String value = null;" );
sc.add( "// " + xmlAssociationMetadata.getMapStyle() + " mode." );
sc.add( "while ( parser.nextTag() == XmlPullParser.START_TAG )" );
sc.add( "{" );
sc.indent();
sc.add( "if ( parser.getName().equals( \"key\" ) )" );
sc.add( "{" );
sc.addIndented( "key = parser.nextText();" );
sc.add( "}" );
sc.add( "else if ( parser.getName().equals( \"value\" ) )" );
sc.add( "{" );
sc.addIndented( "value = parser.nextText()"
+ ( fieldMetadata.isTrim() ? ".trim()" : "" ) + ";" );
sc.add( "}" );
sc.add( "else" );
sc.add( "{" );
sc.addIndented( "parser.nextText();" );
sc.add( "}" );
sc.unindent();
sc.add( "}" );
sc.add( uncapClassName + ".add" + capitalise( singularName ) + "( key, value );" );
sc.unindent();
sc.add( "}" );
sc.add( "parser.next();" );
sc.unindent();
sc.add( "}" );
}
else
{
//INLINE Mode
sc.add( "while ( parser.nextTag() == XmlPullParser.START_TAG )" );
sc.add( "{" );
sc.indent();
sc.add( "String key = parser.getName();" );
sc.add( "String value = parser.nextText()" + ( fieldMetadata.isTrim() ? ".trim()" : "" ) + ";" );
sc.add( uncapClassName + ".add" + capitalise( singularName ) + "( key, value );" );
sc.unindent();
sc.add( "}" );
}
sc.unindent();
sc.add( "}" );
}
}
}
else
{
sc.add( tagComparison );
sc.add( "{" );
sc.indent();
//ModelField
writePrimitiveField( field, field.getType(), uncapClassName, "set" + capitalise( field.getName() ), sc,
jClass );
sc.unindent();
sc.add( "}" );
}
}
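/**
 * Emits the setter call for a primitive or simple-typed field, converting the parsed text
 * with the appropriate helper (getBooleanValue, getIntegerValue, etc.); DOM fields are built
 * with Xpp3DomBuilder and Content fields are skipped.
 */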
private void writePrimitiveField( ModelField field, String type, String objectName, String setterName,
JSourceCode sc, JClass jClass )
{
XmlFieldMetadata fieldMetaData = (XmlFieldMetadata) field.getMetadata( XmlFieldMetadata.ID );
String tagName = fieldMetaData.getTagName();
String parserGetter;
if ( tagName == null )
{
tagName = field.getName();
}
if ( fieldMetaData.isAttribute() )
{
parserGetter = "parser.getAttributeValue( \"\", \"" + tagName + "\" )";
}
else
{
parserGetter = "parser.nextText()";
}
/* TODO: this and a default
if ( fieldMetaData.isRequired() )
{
parserGetter = "getRequiredAttributeValue( " + parserGetter + ", \"" + tagName + "\", parser, strict )";
}
*/
if ( fieldMetaData.isTrim() )
{
parserGetter = "getTrimmedValue( " + parserGetter + " )";
}
if ( "boolean".equals( type ) )
{
sc.add( objectName + "." + setterName + "( getBooleanValue( " + parserGetter + ", \"" + tagName +
"\", parser, \"" + field.getDefaultValue() + "\" ) );" );
}
else if ( "char".equals( type ) )
{
sc.add( objectName + "." + setterName + "( getCharacterValue( " + parserGetter + ", \"" + tagName +
"\", parser ) );" );
}
else if ( "double".equals( type ) )
{
sc.add( objectName + "." + setterName + "( getDoubleValue( " + parserGetter + ", \"" + tagName +
"\", parser, strict ) );" );
}
else if ( "float".equals( type ) )
{
sc.add( objectName + "." + setterName + "( getFloatValue( " + parserGetter + ", \"" + tagName +
"\", parser, strict ) );" );
}
else if ( "int".equals( type ) )
{
sc.add( objectName + "." + setterName + "( getIntegerValue( " + parserGetter + ", \"" + tagName +
"\", parser, strict ) );" );
}
else if ( "long".equals( type ) )
{
sc.add( objectName + "." + setterName + "( getLongValue( " + parserGetter + ", \"" + tagName +
"\", parser, strict ) );" );
}
else if ( "short".equals( type ) )
{
sc.add( objectName + "." + setterName + "( getShortValue( " + parserGetter + ", \"" + tagName +
"\", parser, strict ) );" );
}
else if ( "byte".equals( type ) )
{
sc.add( objectName + "." + setterName + "( getByteValue( " + parserGetter + ", \"" + tagName +
"\", parser, strict ) );" );
}
else if ( "String".equals( type ) || "Boolean".equals( type ) )
{
// TODO: other Primitive types
sc.add( objectName + "." + setterName + "( " + parserGetter + " );" );
}
else if ( "Date".equals( type ) )
{
sc.add( "String dateFormat = " +
( fieldMetaData.getFormat() != null ? "\"" + fieldMetaData.getFormat() + "\"" : "null" ) + ";" );
sc.add( objectName + "." + setterName + "( getDateValue( " + parserGetter + ", \"" + tagName +
"\", dateFormat, parser ) );" );
}
else if ( "DOM".equals( type ) )
{
jClass.addImport( "org.codehaus.plexus.util.xml.Xpp3DomBuilder" );
sc.add( objectName + "." + setterName + "( Xpp3DomBuilder.build( parser ) );" );
}
else if ("Content".equals( type ))
{
//skip this
}
else
{
throw new IllegalArgumentException( "Unknown type: " + type );
}
}
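// Illustrative sketch (added, not part of the original source): for an element-based "int"
// field named "port" on a hypothetical class "Server", writePrimitiveField would emit a line
// roughly like
//   server.setPort( getIntegerValue( parser.nextText(), "port", parser, strict ) );
// with getTrimmedValue( ... ) wrapped around the parser getter when xml.trim is set.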
private void writeParserInitialization( JSourceCode sc )
{
sc.add( "if ( addDefaultEntities )" );
sc.add( "{" );
sc.indent();
sc.add( "
sc.add( "// Latin 1 entities" );
sc.add( "
sc.add( "" );
sc.add( "parser.defineEntityReplacementText( \"nbsp\", \"\\u00a0\" );" );
sc.add( "parser.defineEntityReplacementText( \"iexcl\", \"\\u00a1\" );" );
sc.add( "parser.defineEntityReplacementText( \"cent\", \"\\u00a2\" );" );
sc.add( "parser.defineEntityReplacementText( \"pound\", \"\\u00a3\" );" );
sc.add( "parser.defineEntityReplacementText( \"curren\", \"\\u00a4\" );" );
sc.add( "parser.defineEntityReplacementText( \"yen\", \"\\u00a5\" );" );
sc.add( "parser.defineEntityReplacementText( \"brvbar\", \"\\u00a6\" );" );
sc.add( "parser.defineEntityReplacementText( \"sect\", \"\\u00a7\" );" );
sc.add( "parser.defineEntityReplacementText( \"uml\", \"\\u00a8\" );" );
sc.add( "parser.defineEntityReplacementText( \"copy\", \"\\u00a9\" );" );
sc.add( "parser.defineEntityReplacementText( \"ordf\", \"\\u00aa\" );" );
sc.add( "parser.defineEntityReplacementText( \"laquo\", \"\\u00ab\" );" );
sc.add( "parser.defineEntityReplacementText( \"not\", \"\\u00ac\" );" );
sc.add( "parser.defineEntityReplacementText( \"shy\", \"\\u00ad\" );" );
sc.add( "parser.defineEntityReplacementText( \"reg\", \"\\u00ae\" );" );
sc.add( "parser.defineEntityReplacementText( \"macr\", \"\\u00af\" );" );
sc.add( "parser.defineEntityReplacementText( \"deg\", \"\\u00b0\" );" );
sc.add( "parser.defineEntityReplacementText( \"plusmn\", \"\\u00b1\" );" );
sc.add( "parser.defineEntityReplacementText( \"sup2\", \"\\u00b2\" );" );
sc.add( "parser.defineEntityReplacementText( \"sup3\", \"\\u00b3\" );" );
sc.add( "parser.defineEntityReplacementText( \"acute\", \"\\u00b4\" );" );
sc.add( "parser.defineEntityReplacementText( \"micro\", \"\\u00b5\" );" );
sc.add( "parser.defineEntityReplacementText( \"para\", \"\\u00b6\" );" );
sc.add( "parser.defineEntityReplacementText( \"middot\", \"\\u00b7\" );" );
sc.add( "parser.defineEntityReplacementText( \"cedil\", \"\\u00b8\" );" );
sc.add( "parser.defineEntityReplacementText( \"sup1\", \"\\u00b9\" );" );
sc.add( "parser.defineEntityReplacementText( \"ordm\", \"\\u00ba\" );" );
sc.add( "parser.defineEntityReplacementText( \"raquo\", \"\\u00bb\" );" );
sc.add( "parser.defineEntityReplacementText( \"frac14\", \"\\u00bc\" );" );
sc.add( "parser.defineEntityReplacementText( \"frac12\", \"\\u00bd\" );" );
sc.add( "parser.defineEntityReplacementText( \"frac34\", \"\\u00be\" );" );
sc.add( "parser.defineEntityReplacementText( \"iquest\", \"\\u00bf\" );" );
sc.add( "parser.defineEntityReplacementText( \"Agrave\", \"\\u00c0\" );" );
sc.add( "parser.defineEntityReplacementText( \"Aacute\", \"\\u00c1\" );" );
sc.add( "parser.defineEntityReplacementText( \"Acirc\", \"\\u00c2\" );" );
sc.add( "parser.defineEntityReplacementText( \"Atilde\", \"\\u00c3\" );" );
sc.add( "parser.defineEntityReplacementText( \"Auml\", \"\\u00c4\" );" );
sc.add( "parser.defineEntityReplacementText( \"Aring\", \"\\u00c5\" );" );
sc.add( "parser.defineEntityReplacementText( \"AElig\", \"\\u00c6\" );" );
sc.add( "parser.defineEntityReplacementText( \"Ccedil\", \"\\u00c7\" );" );
sc.add( "parser.defineEntityReplacementText( \"Egrave\", \"\\u00c8\" );" );
sc.add( "parser.defineEntityReplacementText( \"Eacute\", \"\\u00c9\" );" );
sc.add( "parser.defineEntityReplacementText( \"Ecirc\", \"\\u00ca\" );" );
sc.add( "parser.defineEntityReplacementText( \"Euml\", \"\\u00cb\" );" );
sc.add( "parser.defineEntityReplacementText( \"Igrave\", \"\\u00cc\" );" );
sc.add( "parser.defineEntityReplacementText( \"Iacute\", \"\\u00cd\" );" );
sc.add( "parser.defineEntityReplacementText( \"Icirc\", \"\\u00ce\" );" );
sc.add( "parser.defineEntityReplacementText( \"Iuml\", \"\\u00cf\" );" );
sc.add( "parser.defineEntityReplacementText( \"ETH\", \"\\u00d0\" );" );
sc.add( "parser.defineEntityReplacementText( \"Ntilde\", \"\\u00d1\" );" );
sc.add( "parser.defineEntityReplacementText( \"Ograve\", \"\\u00d2\" );" );
sc.add( "parser.defineEntityReplacementText( \"Oacute\", \"\\u00d3\" );" );
sc.add( "parser.defineEntityReplacementText( \"Ocirc\", \"\\u00d4\" );" );
sc.add( "parser.defineEntityReplacementText( \"Otilde\", \"\\u00d5\" );" );
sc.add( "parser.defineEntityReplacementText( \"Ouml\", \"\\u00d6\" );" );
sc.add( "parser.defineEntityReplacementText( \"times\", \"\\u00d7\" );" );
sc.add( "parser.defineEntityReplacementText( \"Oslash\", \"\\u00d8\" );" );
sc.add( "parser.defineEntityReplacementText( \"Ugrave\", \"\\u00d9\" );" );
sc.add( "parser.defineEntityReplacementText( \"Uacute\", \"\\u00da\" );" );
sc.add( "parser.defineEntityReplacementText( \"Ucirc\", \"\\u00db\" );" );
sc.add( "parser.defineEntityReplacementText( \"Uuml\", \"\\u00dc\" );" );
sc.add( "parser.defineEntityReplacementText( \"Yacute\", \"\\u00dd\" );" );
sc.add( "parser.defineEntityReplacementText( \"THORN\", \"\\u00de\" );" );
sc.add( "parser.defineEntityReplacementText( \"szlig\", \"\\u00df\" );" );
sc.add( "parser.defineEntityReplacementText( \"agrave\", \"\\u00e0\" );" );
sc.add( "parser.defineEntityReplacementText( \"aacute\", \"\\u00e1\" );" );
sc.add( "parser.defineEntityReplacementText( \"acirc\", \"\\u00e2\" );" );
sc.add( "parser.defineEntityReplacementText( \"atilde\", \"\\u00e3\" );" );
sc.add( "parser.defineEntityReplacementText( \"auml\", \"\\u00e4\" );" );
sc.add( "parser.defineEntityReplacementText( \"aring\", \"\\u00e5\" );" );
sc.add( "parser.defineEntityReplacementText( \"aelig\", \"\\u00e6\" );" );
sc.add( "parser.defineEntityReplacementText( \"ccedil\", \"\\u00e7\" );" );
sc.add( "parser.defineEntityReplacementText( \"egrave\", \"\\u00e8\" );" );
sc.add( "parser.defineEntityReplacementText( \"eacute\", \"\\u00e9\" );" );
sc.add( "parser.defineEntityReplacementText( \"ecirc\", \"\\u00ea\" );" );
sc.add( "parser.defineEntityReplacementText( \"euml\", \"\\u00eb\" );" );
sc.add( "parser.defineEntityReplacementText( \"igrave\", \"\\u00ec\" );" );
sc.add( "parser.defineEntityReplacementText( \"iacute\", \"\\u00ed\" );" );
sc.add( "parser.defineEntityReplacementText( \"icirc\", \"\\u00ee\" );" );
sc.add( "parser.defineEntityReplacementText( \"iuml\", \"\\u00ef\" );" );
sc.add( "parser.defineEntityReplacementText( \"eth\", \"\\u00f0\" );" );
sc.add( "parser.defineEntityReplacementText( \"ntilde\", \"\\u00f1\" );" );
sc.add( "parser.defineEntityReplacementText( \"ograve\", \"\\u00f2\" );" );
sc.add( "parser.defineEntityReplacementText( \"oacute\", \"\\u00f3\" );" );
sc.add( "parser.defineEntityReplacementText( \"ocirc\", \"\\u00f4\" );" );
sc.add( "parser.defineEntityReplacementText( \"otilde\", \"\\u00f5\" );" );
sc.add( "parser.defineEntityReplacementText( \"ouml\", \"\\u00f6\" );" );
sc.add( "parser.defineEntityReplacementText( \"divide\", \"\\u00f7\" );" );
sc.add( "parser.defineEntityReplacementText( \"oslash\", \"\\u00f8\" );" );
sc.add( "parser.defineEntityReplacementText( \"ugrave\", \"\\u00f9\" );" );
sc.add( "parser.defineEntityReplacementText( \"uacute\", \"\\u00fa\" );" );
sc.add( "parser.defineEntityReplacementText( \"ucirc\", \"\\u00fb\" );" );
sc.add( "parser.defineEntityReplacementText( \"uuml\", \"\\u00fc\" );" );
sc.add( "parser.defineEntityReplacementText( \"yacute\", \"\\u00fd\" );" );
sc.add( "parser.defineEntityReplacementText( \"thorn\", \"\\u00fe\" );" );
sc.add( "parser.defineEntityReplacementText( \"yuml\", \"\\u00ff\" );" );
sc.add( "" );
sc.add( "
sc.add( "// Special entities" );
sc.add( "
sc.add( "" );
// These are required to be handled by the parser by the XML specification
// sc.add( "parser.defineEntityReplacementText( \"quot\", \"\\u0022\" );" );
// sc.add( "parser.defineEntityReplacementText( \"amp\", \"\\u0026\" );" );
// sc.add( "parser.defineEntityReplacementText( \"lt\", \"\\u003c\" );" );
// sc.add( "parser.defineEntityReplacementText( \"gt\", \"\\u003e\" );" );
// sc.add( "parser.defineEntityReplacementText( \"apos\", \"\\u0027\" );" );
sc.add( "parser.defineEntityReplacementText( \"OElig\", \"\\u0152\" );" );
sc.add( "parser.defineEntityReplacementText( \"oelig\", \"\\u0153\" );" );
sc.add( "parser.defineEntityReplacementText( \"Scaron\", \"\\u0160\" );" );
sc.add( "parser.defineEntityReplacementText( \"scaron\", \"\\u0161\" );" );
sc.add( "parser.defineEntityReplacementText( \"Yuml\", \"\\u0178\" );" );
sc.add( "parser.defineEntityReplacementText( \"circ\", \"\\u02c6\" );" );
sc.add( "parser.defineEntityReplacementText( \"tilde\", \"\\u02dc\" );" );
sc.add( "parser.defineEntityReplacementText( \"ensp\", \"\\u2002\" );" );
sc.add( "parser.defineEntityReplacementText( \"emsp\", \"\\u2003\" );" );
sc.add( "parser.defineEntityReplacementText( \"thinsp\", \"\\u2009\" );" );
sc.add( "parser.defineEntityReplacementText( \"zwnj\", \"\\u200c\" );" );
sc.add( "parser.defineEntityReplacementText( \"zwj\", \"\\u200d\" );" );
sc.add( "parser.defineEntityReplacementText( \"lrm\", \"\\u200e\" );" );
sc.add( "parser.defineEntityReplacementText( \"rlm\", \"\\u200f\" );" );
sc.add( "parser.defineEntityReplacementText( \"ndash\", \"\\u2013\" );" );
sc.add( "parser.defineEntityReplacementText( \"mdash\", \"\\u2014\" );" );
sc.add( "parser.defineEntityReplacementText( \"lsquo\", \"\\u2018\" );" );
sc.add( "parser.defineEntityReplacementText( \"rsquo\", \"\\u2019\" );" );
sc.add( "parser.defineEntityReplacementText( \"sbquo\", \"\\u201a\" );" );
sc.add( "parser.defineEntityReplacementText( \"ldquo\", \"\\u201c\" );" );
sc.add( "parser.defineEntityReplacementText( \"rdquo\", \"\\u201d\" );" );
sc.add( "parser.defineEntityReplacementText( \"bdquo\", \"\\u201e\" );" );
sc.add( "parser.defineEntityReplacementText( \"dagger\", \"\\u2020\" );" );
sc.add( "parser.defineEntityReplacementText( \"Dagger\", \"\\u2021\" );" );
sc.add( "parser.defineEntityReplacementText( \"permil\", \"\\u2030\" );" );
sc.add( "parser.defineEntityReplacementText( \"lsaquo\", \"\\u2039\" );" );
sc.add( "parser.defineEntityReplacementText( \"rsaquo\", \"\\u203a\" );" );
sc.add( "parser.defineEntityReplacementText( \"euro\", \"\\u20ac\" );" );
sc.add( "" );
sc.add( "
sc.add( "// Symbol entities" );
sc.add( "
sc.add( "" );
sc.add( "parser.defineEntityReplacementText( \"fnof\", \"\\u0192\" );" );
sc.add( "parser.defineEntityReplacementText( \"Alpha\", \"\\u0391\" );" );
sc.add( "parser.defineEntityReplacementText( \"Beta\", \"\\u0392\" );" );
sc.add( "parser.defineEntityReplacementText( \"Gamma\", \"\\u0393\" );" );
sc.add( "parser.defineEntityReplacementText( \"Delta\", \"\\u0394\" );" );
sc.add( "parser.defineEntityReplacementText( \"Epsilon\", \"\\u0395\" );" );
sc.add( "parser.defineEntityReplacementText( \"Zeta\", \"\\u0396\" );" );
sc.add( "parser.defineEntityReplacementText( \"Eta\", \"\\u0397\" );" );
sc.add( "parser.defineEntityReplacementText( \"Theta\", \"\\u0398\" );" );
sc.add( "parser.defineEntityReplacementText( \"Iota\", \"\\u0399\" );" );
sc.add( "parser.defineEntityReplacementText( \"Kappa\", \"\\u039a\" );" );
sc.add( "parser.defineEntityReplacementText( \"Lambda\", \"\\u039b\" );" );
sc.add( "parser.defineEntityReplacementText( \"Mu\", \"\\u039c\" );" );
sc.add( "parser.defineEntityReplacementText( \"Nu\", \"\\u039d\" );" );
sc.add( "parser.defineEntityReplacementText( \"Xi\", \"\\u039e\" );" );
sc.add( "parser.defineEntityReplacementText( \"Omicron\", \"\\u039f\" );" );
sc.add( "parser.defineEntityReplacementText( \"Pi\", \"\\u03a0\" );" );
sc.add( "parser.defineEntityReplacementText( \"Rho\", \"\\u03a1\" );" );
sc.add( "parser.defineEntityReplacementText( \"Sigma\", \"\\u03a3\" );" );
sc.add( "parser.defineEntityReplacementText( \"Tau\", \"\\u03a4\" );" );
sc.add( "parser.defineEntityReplacementText( \"Upsilon\", \"\\u03a5\" );" );
sc.add( "parser.defineEntityReplacementText( \"Phi\", \"\\u03a6\" );" );
sc.add( "parser.defineEntityReplacementText( \"Chi\", \"\\u03a7\" );" );
sc.add( "parser.defineEntityReplacementText( \"Psi\", \"\\u03a8\" );" );
sc.add( "parser.defineEntityReplacementText( \"Omega\", \"\\u03a9\" );" );
sc.add( "parser.defineEntityReplacementText( \"alpha\", \"\\u03b1\" );" );
sc.add( "parser.defineEntityReplacementText( \"beta\", \"\\u03b2\" );" );
sc.add( "parser.defineEntityReplacementText( \"gamma\", \"\\u03b3\" );" );
sc.add( "parser.defineEntityReplacementText( \"delta\", \"\\u03b4\" );" );
sc.add( "parser.defineEntityReplacementText( \"epsilon\", \"\\u03b5\" );" );
sc.add( "parser.defineEntityReplacementText( \"zeta\", \"\\u03b6\" );" );
sc.add( "parser.defineEntityReplacementText( \"eta\", \"\\u03b7\" );" );
sc.add( "parser.defineEntityReplacementText( \"theta\", \"\\u03b8\" );" );
sc.add( "parser.defineEntityReplacementText( \"iota\", \"\\u03b9\" );" );
sc.add( "parser.defineEntityReplacementText( \"kappa\", \"\\u03ba\" );" );
sc.add( "parser.defineEntityReplacementText( \"lambda\", \"\\u03bb\" );" );
sc.add( "parser.defineEntityReplacementText( \"mu\", \"\\u03bc\" );" );
sc.add( "parser.defineEntityReplacementText( \"nu\", \"\\u03bd\" );" );
sc.add( "parser.defineEntityReplacementText( \"xi\", \"\\u03be\" );" );
sc.add( "parser.defineEntityReplacementText( \"omicron\", \"\\u03bf\" );" );
sc.add( "parser.defineEntityReplacementText( \"pi\", \"\\u03c0\" );" );
sc.add( "parser.defineEntityReplacementText( \"rho\", \"\\u03c1\" );" );
sc.add( "parser.defineEntityReplacementText( \"sigmaf\", \"\\u03c2\" );" );
sc.add( "parser.defineEntityReplacementText( \"sigma\", \"\\u03c3\" );" );
sc.add( "parser.defineEntityReplacementText( \"tau\", \"\\u03c4\" );" );
sc.add( "parser.defineEntityReplacementText( \"upsilon\", \"\\u03c5\" );" );
sc.add( "parser.defineEntityReplacementText( \"phi\", \"\\u03c6\" );" );
sc.add( "parser.defineEntityReplacementText( \"chi\", \"\\u03c7\" );" );
sc.add( "parser.defineEntityReplacementText( \"psi\", \"\\u03c8\" );" );
sc.add( "parser.defineEntityReplacementText( \"omega\", \"\\u03c9\" );" );
sc.add( "parser.defineEntityReplacementText( \"thetasym\", \"\\u03d1\" );" );
sc.add( "parser.defineEntityReplacementText( \"upsih\", \"\\u03d2\" );" );
sc.add( "parser.defineEntityReplacementText( \"piv\", \"\\u03d6\" );" );
sc.add( "parser.defineEntityReplacementText( \"bull\", \"\\u2022\" );" );
sc.add( "parser.defineEntityReplacementText( \"hellip\", \"\\u2026\" );" );
sc.add( "parser.defineEntityReplacementText( \"prime\", \"\\u2032\" );" );
sc.add( "parser.defineEntityReplacementText( \"Prime\", \"\\u2033\" );" );
sc.add( "parser.defineEntityReplacementText( \"oline\", \"\\u203e\" );" );
sc.add( "parser.defineEntityReplacementText( \"frasl\", \"\\u2044\" );" );
sc.add( "parser.defineEntityReplacementText( \"weierp\", \"\\u2118\" );" );
sc.add( "parser.defineEntityReplacementText( \"image\", \"\\u2111\" );" );
sc.add( "parser.defineEntityReplacementText( \"real\", \"\\u211c\" );" );
sc.add( "parser.defineEntityReplacementText( \"trade\", \"\\u2122\" );" );
sc.add( "parser.defineEntityReplacementText( \"alefsym\", \"\\u2135\" );" );
sc.add( "parser.defineEntityReplacementText( \"larr\", \"\\u2190\" );" );
sc.add( "parser.defineEntityReplacementText( \"uarr\", \"\\u2191\" );" );
sc.add( "parser.defineEntityReplacementText( \"rarr\", \"\\u2192\" );" );
sc.add( "parser.defineEntityReplacementText( \"darr\", \"\\u2193\" );" );
sc.add( "parser.defineEntityReplacementText( \"harr\", \"\\u2194\" );" );
sc.add( "parser.defineEntityReplacementText( \"crarr\", \"\\u21b5\" );" );
sc.add( "parser.defineEntityReplacementText( \"lArr\", \"\\u21d0\" );" );
sc.add( "parser.defineEntityReplacementText( \"uArr\", \"\\u21d1\" );" );
sc.add( "parser.defineEntityReplacementText( \"rArr\", \"\\u21d2\" );" );
sc.add( "parser.defineEntityReplacementText( \"dArr\", \"\\u21d3\" );" );
sc.add( "parser.defineEntityReplacementText( \"hArr\", \"\\u21d4\" );" );
sc.add( "parser.defineEntityReplacementText( \"forall\", \"\\u2200\" );" );
sc.add( "parser.defineEntityReplacementText( \"part\", \"\\u2202\" );" );
sc.add( "parser.defineEntityReplacementText( \"exist\", \"\\u2203\" );" );
sc.add( "parser.defineEntityReplacementText( \"empty\", \"\\u2205\" );" );
sc.add( "parser.defineEntityReplacementText( \"nabla\", \"\\u2207\" );" );
sc.add( "parser.defineEntityReplacementText( \"isin\", \"\\u2208\" );" );
sc.add( "parser.defineEntityReplacementText( \"notin\", \"\\u2209\" );" );
sc.add( "parser.defineEntityReplacementText( \"ni\", \"\\u220b\" );" );
sc.add( "parser.defineEntityReplacementText( \"prod\", \"\\u220f\" );" );
sc.add( "parser.defineEntityReplacementText( \"sum\", \"\\u2211\" );" );
sc.add( "parser.defineEntityReplacementText( \"minus\", \"\\u2212\" );" );
sc.add( "parser.defineEntityReplacementText( \"lowast\", \"\\u2217\" );" );
sc.add( "parser.defineEntityReplacementText( \"radic\", \"\\u221a\" );" );
sc.add( "parser.defineEntityReplacementText( \"prop\", \"\\u221d\" );" );
sc.add( "parser.defineEntityReplacementText( \"infin\", \"\\u221e\" );" );
sc.add( "parser.defineEntityReplacementText( \"ang\", \"\\u2220\" );" );
sc.add( "parser.defineEntityReplacementText( \"and\", \"\\u2227\" );" );
sc.add( "parser.defineEntityReplacementText( \"or\", \"\\u2228\" );" );
sc.add( "parser.defineEntityReplacementText( \"cap\", \"\\u2229\" );" );
sc.add( "parser.defineEntityReplacementText( \"cup\", \"\\u222a\" );" );
sc.add( "parser.defineEntityReplacementText( \"int\", \"\\u222b\" );" );
sc.add( "parser.defineEntityReplacementText( \"there4\", \"\\u2234\" );" );
sc.add( "parser.defineEntityReplacementText( \"sim\", \"\\u223c\" );" );
sc.add( "parser.defineEntityReplacementText( \"cong\", \"\\u2245\" );" );
sc.add( "parser.defineEntityReplacementText( \"asymp\", \"\\u2248\" );" );
sc.add( "parser.defineEntityReplacementText( \"ne\", \"\\u2260\" );" );
sc.add( "parser.defineEntityReplacementText( \"equiv\", \"\\u2261\" );" );
sc.add( "parser.defineEntityReplacementText( \"le\", \"\\u2264\" );" );
sc.add( "parser.defineEntityReplacementText( \"ge\", \"\\u2265\" );" );
sc.add( "parser.defineEntityReplacementText( \"sub\", \"\\u2282\" );" );
sc.add( "parser.defineEntityReplacementText( \"sup\", \"\\u2283\" );" );
sc.add( "parser.defineEntityReplacementText( \"nsub\", \"\\u2284\" );" );
sc.add( "parser.defineEntityReplacementText( \"sube\", \"\\u2286\" );" );
sc.add( "parser.defineEntityReplacementText( \"supe\", \"\\u2287\" );" );
sc.add( "parser.defineEntityReplacementText( \"oplus\", \"\\u2295\" );" );
sc.add( "parser.defineEntityReplacementText( \"otimes\", \"\\u2297\" );" );
sc.add( "parser.defineEntityReplacementText( \"perp\", \"\\u22a5\" );" );
sc.add( "parser.defineEntityReplacementText( \"sdot\", \"\\u22c5\" );" );
sc.add( "parser.defineEntityReplacementText( \"lceil\", \"\\u2308\" );" );
sc.add( "parser.defineEntityReplacementText( \"rceil\", \"\\u2309\" );" );
sc.add( "parser.defineEntityReplacementText( \"lfloor\", \"\\u230a\" );" );
sc.add( "parser.defineEntityReplacementText( \"rfloor\", \"\\u230b\" );" );
sc.add( "parser.defineEntityReplacementText( \"lang\", \"\\u2329\" );" );
sc.add( "parser.defineEntityReplacementText( \"rang\", \"\\u232a\" );" );
sc.add( "parser.defineEntityReplacementText( \"loz\", \"\\u25ca\" );" );
sc.add( "parser.defineEntityReplacementText( \"spades\", \"\\u2660\" );" );
sc.add( "parser.defineEntityReplacementText( \"clubs\", \"\\u2663\" );" );
sc.add( "parser.defineEntityReplacementText( \"hearts\", \"\\u2665\" );" );
sc.add( "parser.defineEntityReplacementText( \"diams\", \"\\u2666\" );" );
sc.add( "" );
sc.unindent();
sc.add( "}" );
}
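// Note (added, not part of the original source): the block above emits a guarded
// "if ( addDefaultEntities ) { parser.defineEntityReplacementText( ... ); ... }" section into
// the generated reader, covering the HTML Latin 1, special and symbol entity sets; the five
// predefined XML entities (quot, amp, lt, gt, apos) are deliberately left to the parser itself.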
private void writeHelpers( JClass jClass )
{
JMethod method = new JMethod( "getTrimmedValue", new JClass( "String" ), null );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
JSourceCode sc = method.getSourceCode();
sc.add( "if ( s != null )" );
sc.add( "{" );
sc.addIndented( "s = s.trim();" );
sc.add( "}" );
sc.add( "return s;" );
jClass.addMethod( method );
method = new JMethod( "getRequiredAttributeValue", new JClass( "String" ), null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
sc = method.getSourceCode();
sc.add( "if ( s == null )" );
sc.add( "{" );
sc.indent();
sc.add( "if ( strict )" );
sc.add( "{" );
sc.addIndented(
"throw new XmlPullParserException( \"Missing required value for attribute '\" + attribute + \"'\", parser, null );" );
sc.add( "}" );
sc.unindent();
sc.add( "}" );
sc.add( "return s;" );
jClass.addMethod( method );
method = new JMethod( "getBooleanValue", JType.BOOLEAN, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
sc = method.getSourceCode();
sc.add( "return getBooleanValue( s, attribute, parser, null );" );
jClass.addMethod( method );
method = new JMethod( "getBooleanValue", JType.BOOLEAN, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( new JClass( "String" ), "defaultValue" ) );
sc = method.getSourceCode();
sc.add( "if ( s != null && s.length() != 0 )" );
sc.add( "{" );
sc.addIndented( "return Boolean.valueOf( s ).booleanValue();" );
sc.add( "}" );
sc.add( "if ( defaultValue != null )" );
sc.add( "{" );
sc.addIndented( "return Boolean.valueOf( defaultValue ).booleanValue();" );
sc.add( "}" );
sc.add( "return false;" );
jClass.addMethod( method );
method = new JMethod( "getCharacterValue", JType.CHAR, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
sc = method.getSourceCode();
sc.add( "if ( s != null )" );
sc.add( "{" );
sc.addIndented( "return s.charAt( 0 );" );
sc.add( "}" );
sc.add( "return 0;" );
jClass.addMethod( method );
method = new JMethod( "getIntegerValue", JType.INT, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
sc = method.getSourceCode();
convertNumericalType( sc, "Integer.valueOf( s ).intValue()", "an integer" );
jClass.addMethod( method );
method = new JMethod( "getShortValue", JType.SHORT, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
sc = method.getSourceCode();
convertNumericalType( sc, "Short.valueOf( s ).shortValue()", "a short integer" );
jClass.addMethod( method );
method = new JMethod( "getByteValue", JType.BYTE, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
sc = method.getSourceCode();
convertNumericalType( sc, "Byte.valueOf( s ).byteValue()", "a byte" );
jClass.addMethod( method );
method = new JMethod( "getLongValue", JType.LONG, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
sc = method.getSourceCode();
convertNumericalType( sc, "Long.valueOf( s ).longValue()", "a long integer" );
jClass.addMethod( method );
method = new JMethod( "getFloatValue", JType.FLOAT, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
sc = method.getSourceCode();
convertNumericalType( sc, "Float.valueOf( s ).floatValue()", "a floating point number" );
jClass.addMethod( method );
method = new JMethod( "getDoubleValue", JType.DOUBLE, null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( JClass.BOOLEAN, "strict" ) );
sc = method.getSourceCode();
convertNumericalType( sc, "Double.valueOf( s ).doubleValue()", "a floating point number" );
jClass.addMethod( method );
method = new JMethod( "getDateValue", new JClass( "java.util.Date" ), null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addException( new JClass( "XmlPullParserException" ) );
sc = method.getSourceCode();
sc.add( "return getDateValue( s, attribute, null, parser );" );
jClass.addMethod( method );
method = new JMethod( "getDateValue", new JClass( "java.util.Date" ), null );
method.addException( new JClass( "XmlPullParserException" ) );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "String" ), "s" ) );
method.addParameter( new JParameter( new JClass( "String" ), "attribute" ) );
method.addParameter( new JParameter( new JClass( "String" ), "dateFormat" ) );
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addException( new JClass( "XmlPullParserException" ) );
sc = method.getSourceCode();
sc.add( "if ( s != null )" );
sc.add( "{" );
sc.indent();
sc.add( "if ( dateFormat == null )" );
sc.add( "{" );
sc.addIndented( "return new java.util.Date( Long.valueOf( s ).longValue() );" );
sc.add( "}" );
sc.add( "else" );
sc.add( "{" );
sc.indent();
sc.add( "DateFormat dateParser = new java.text.SimpleDateFormat( dateFormat, Locale.US );" );
sc.add( "try" );
sc.add( "{" );
sc.addIndented( "return dateParser.parse( s );" );
sc.add( "}" );
sc.add( "catch ( java.text.ParseException e )" );
sc.add( "{" );
sc.addIndented( "throw new XmlPullParserException( e.getMessage() );" );
sc.add( "}" );
sc.unindent();
sc.add( "}" );
sc.unindent();
sc.add( "}" );
sc.add( "return null;" );
jClass.addMethod( method );
method = new JMethod( "checkFieldWithDuplicate", JType.BOOLEAN, null );
method.getModifiers().makePrivate();
method.addParameter( new JParameter( new JClass( "XmlPullParser" ), "parser" ) );
method.addParameter( new JParameter( new JClass( "String" ), "tagName" ) );
method.addParameter( new JParameter( new JClass( "String" ), "alias" ) );
method.addParameter( new JParameter( new JClass( "java.util.Set" ), "parsed" ) );
method.addException( new JClass( "XmlPullParserException" ) );
sc = method.getSourceCode();
sc.add( "if ( !( parser.getName().equals( tagName ) || parser.getName().equals( alias ) ) )" );
sc.add( "{" );
sc.addIndented( "return false;" );
sc.add( "}" );
sc.add( "if ( parsed.contains( tagName ) )" );
sc.add( "{" );
sc.addIndented(
"throw new XmlPullParserException( \"Duplicated tag: '\" + tagName + \"'\", parser, null );" );
sc.add( "}" );
sc.add( "parsed.add( tagName );" );
sc.add( "return true;" );
jClass.addMethod( method );
}
private void convertNumericalType( JSourceCode sc, String expression, String typeDesc )
{
sc.add( "if ( s != null )" );
sc.add( "{" );
sc.indent();
sc.add( "try" );
sc.add( "{" );
sc.addIndented( "return " + expression + ";" );
sc.add( "}" );
sc.add( "catch ( NumberFormatException e )" );
sc.add( "{" );
sc.indent();
sc.add( "if ( strict )" );
sc.add( "{" );
sc.addIndented( "throw new XmlPullParserException( \"Unable to parse element '\" + attribute + \"', must be " +
typeDesc + "\", parser, null );" );
sc.add( "}" );
sc.unindent();
sc.add( "}" );
sc.unindent();
sc.add( "}" );
sc.add( "return 0;" );
}
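// Illustrative sketch (added, not part of the original source): convertNumericalType fills a
// helper body with a null check plus try/catch, so the generated getIntegerValue reads roughly
//   if ( s != null )
//   {
//       try { return Integer.valueOf( s ).intValue(); }
//       catch ( NumberFormatException e )
//       {
//           if ( strict ) { throw new XmlPullParserException( "Unable to parse element '" + attribute + "', must be an integer", parser, null ); }
//       }
//   }
//   return 0;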
}
|
package org.navalplanner.web.planner.allocation;
import static org.navalplanner.web.I18nHelper._;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.WeakHashMap;
import java.util.concurrent.Callable;
import org.joda.time.DateTime;
import org.joda.time.LocalDate;
import org.navalplanner.business.planner.entities.AggregateOfResourceAllocations;
import org.navalplanner.business.planner.entities.GenericResourceAllocation;
import org.navalplanner.business.planner.entities.ResourceAllocation;
import org.navalplanner.business.planner.entities.SpecificResourceAllocation;
import org.navalplanner.web.common.ViewSwitcher;
import org.navalplanner.web.resourceload.ResourceLoadModel;
import org.zkoss.ganttz.timetracker.ICellForDetailItemRenderer;
import org.zkoss.ganttz.timetracker.IConvertibleToColumn;
import org.zkoss.ganttz.timetracker.PairOfLists;
import org.zkoss.ganttz.timetracker.TimeTrackedTable;
import org.zkoss.ganttz.timetracker.TimeTrackedTableWithLeftPane;
import org.zkoss.ganttz.timetracker.TimeTracker;
import org.zkoss.ganttz.timetracker.TimeTrackerComponentWithoutColumns;
import org.zkoss.ganttz.timetracker.zoom.DetailItem;
import org.zkoss.ganttz.util.Interval;
import org.zkoss.zk.ui.Component;
import org.zkoss.zk.ui.event.Event;
import org.zkoss.zk.ui.event.EventListener;
import org.zkoss.zk.ui.event.Events;
import org.zkoss.zk.ui.util.GenericForwardComposer;
import org.zkoss.zul.Combobox;
import org.zkoss.zul.Div;
import org.zkoss.zul.Grid;
import org.zkoss.zul.Intbox;
import org.zkoss.zul.Label;
import org.zkoss.zul.api.Column;
public class AdvancedAllocationController extends GenericForwardComposer {
public interface IAdvanceAllocationResultReceiver {
public void accepted(AllocationResult modifiedAllocationResult);
public void cancel();
}
private Div insertionPointTimetracker;
private Div insertionPointLeftPanel;
private Div insertionPointRightPanel;
private TimeTracker timeTracker;
private TimeTrackerComponentWithoutColumns timeTrackerComponent;
private Grid leftPane;
private TimeTrackedTable<Row> table;
private final ViewSwitcher switcher;
private final AllocationResult allocationResult;
private final IAdvanceAllocationResultReceiver resultReceiver;
public AdvancedAllocationController(ViewSwitcher switcher,
AllocationResult allocationResult,
IAdvanceAllocationResultReceiver resultReceiver) {
this.switcher = switcher;
this.allocationResult = allocationResult;
this.resultReceiver = resultReceiver;
}
@Override
public void doAfterCompose(Component comp) throws Exception {
super.doAfterCompose(comp);
createComponents();
insertComponentsInLayout();
timeTrackerComponent.afterCompose();
table.afterCompose();
}
private void createComponents() {
timeTracker = new TimeTracker(intervalFromData());
timeTrackerComponent = new TimeTrackerComponentWithoutColumns(
timeTracker, "timeTracker");
TimeTrackedTableWithLeftPane<Row, Row> timeTrackedTableWithLeftPane = new TimeTrackedTableWithLeftPane<Row, Row>(
getDataSource(), getColumnsForLeft(), getLeftRenderer(),
getRightRenderer(), timeTracker);
table = timeTrackedTableWithLeftPane.getRightPane();
leftPane = timeTrackedTableWithLeftPane.getLeftPane();
}
private void insertComponentsInLayout() {
insertionPointRightPanel.appendChild(table);
insertionPointLeftPanel.appendChild(leftPane);
insertionPointTimetracker.appendChild(timeTrackerComponent);
}
public void onClick$acceptButton() {
switcher.goToPlanningOrderView();
resultReceiver.accepted(allocationResult);
}
public void onClick$cancelButton() {
switcher.goToPlanningOrderView();
resultReceiver.cancel();
}
public void onClick$zoomIncrease() {
timeTracker.zoomIncrease();
}
public void onClick$zoomDecrease() {
timeTracker.zoomDecrease();
}
private List<Row> rowsCached = null;
private List<Row> getRows() {
if (rowsCached != null) {
return rowsCached;
}
rowsCached = new ArrayList<Row>();
Row groupingRow = buildGroupingRow();
rowsCached.add(groupingRow);
List<Row> genericRows = genericRows();
groupingRow.listenTo(genericRows);
rowsCached.addAll(genericRows);
List<Row> specificRows = specificRows();
groupingRow.listenTo(specificRows);
rowsCached.addAll(specificRows);
return rowsCached;
}
private List<Row> specificRows() {
List<Row> result = new ArrayList<Row>();
for (SpecificResourceAllocation specificResourceAllocation : allocationResult
.getSpecificAllocations()) {
result.add(createSpecificRow(specificResourceAllocation));
}
return result;
}
private Row createSpecificRow(
SpecificResourceAllocation specificResourceAllocation) {
return Row.createRow(specificResourceAllocation.getResource()
.getDescription(), 1, Arrays
.asList(specificResourceAllocation));
}
private List<Row> genericRows() {
List<Row> result = new ArrayList<Row>();
for (GenericResourceAllocation genericResourceAllocation : allocationResult
.getGenericAllocations()) {
result.add(buildGenericRow(genericResourceAllocation));
}
return result;
}
private Row buildGenericRow(
GenericResourceAllocation genericResourceAllocation) {
return Row.createRow(ResourceLoadModel
.getName(genericResourceAllocation.getCriterions()), 1, Arrays
.asList(genericResourceAllocation));
}
private Row buildGroupingRow() {
String taskName = allocationResult.getTask().getName();
Row groupingRow = Row.createRow(taskName + " (task)", 0,
allocationResult
.getAllSortedByStartDate());
return groupingRow;
}
private ICellForDetailItemRenderer<ColumnOnRow, Row> getLeftRenderer() {
return new ICellForDetailItemRenderer<ColumnOnRow, Row>() {
@Override
public Component cellFor(ColumnOnRow column, Row row) {
return column.cellFor(row);
}
};
}
private List<ColumnOnRow> getColumnsForLeft() {
List<ColumnOnRow> result = new ArrayList<ColumnOnRow>();
result.add(new ColumnOnRow(_("Name")) {
@Override
public Component cellFor(Row row) {
return row.getNameLabel();
}
});
result.add(new ColumnOnRow(_("Hours")) {
@Override
public Component cellFor(Row row) {
return row.getAllHours();
}
});
result.add(new ColumnOnRow(_("Function")) {
@Override
public Component cellFor(Row row) {
return row.getFunction();
}
});
return result;
}
private Callable<PairOfLists<Row, Row>> getDataSource() {
return new Callable<PairOfLists<Row, Row>>() {
@Override
public PairOfLists<Row, Row> call() throws Exception {
List<Row> rows = getRows();
return new PairOfLists<Row, Row>(rows, rows);
}
};
}
private ICellForDetailItemRenderer<DetailItem, Row> getRightRenderer() {
return new ICellForDetailItemRenderer<DetailItem, Row>() {
@Override
public Component cellFor(DetailItem item, Row data) {
return data.hoursOnInterval(item);
}
};
}
private Interval intervalFromData() {
List<ResourceAllocation<?>> all = allocationResult
.getAllSortedByStartDate();
if (all.isEmpty()) {
return new Interval(allocationResult.getTask().getStartDate(),
allocationResult.getTask().getEndDate());
} else {
LocalDate start = all.get(0).getStartDate();
LocalDate end = getEnd(all);
return new Interval(asDate(start), asDate(end));
}
}
private LocalDate getEnd(List<ResourceAllocation<?>> all) {
ArrayList<ResourceAllocation<?>> reversed = reverse(all);
LocalDate end = reversed.get(0).getEndDate();
ListIterator<ResourceAllocation<?>> listIterator = reversed
.listIterator(1);
while (listIterator.hasNext()) {
ResourceAllocation<?> current = listIterator.next();
if (current.getEndDate().compareTo(end) >= 0) {
end = current.getEndDate();
} else {
return end;
}
}
return end;
}
private ArrayList<ResourceAllocation<?>> reverse(
List<ResourceAllocation<?>> all) {
ArrayList<ResourceAllocation<?>> reversed = new ArrayList<ResourceAllocation<?>>(
all);
Collections.reverse(reversed);
return reversed;
}
private Date asDate(LocalDate start) {
return start.toDateMidnight().toDate();
}
}
abstract class ColumnOnRow implements IConvertibleToColumn {
private final String columnName;
ColumnOnRow(String columnName) {
this.columnName = columnName;
}
public abstract Component cellFor(Row row);
@Override
public Column toColumn() {
Column column = new org.zkoss.zul.Column();
column.setLabel(columnName);
return column;
}
public String getName() {
return columnName;
}
}
interface CellChangedListener {
public void changeOn(DetailItem detailItem);
public void changeOnGlobal();
}
class Row {
static Row createRow(String name, int level,
List<? extends ResourceAllocation<?>> allocations) {
return new Row(name, level, allocations);
}
void listenTo(Collection<Row> rows) {
for (Row row : rows) {
listenTo(row);
}
}
void listenTo(Row row) {
row.add(new CellChangedListener() {
@Override
public void changeOnGlobal() {
reloadAllHours();
}
@Override
public void changeOn(DetailItem detailItem) {
Component component = componentsByDetailItem.get(detailItem);
if (component == null) {
return;
}
reloadHoursOnInterval(component, detailItem);
reloadAllHours();
}
});
}
private Component allHoursInput;
private Label nameLabel;
private List<CellChangedListener> listeners = new ArrayList<CellChangedListener>();
private Map<DetailItem, Component> componentsByDetailItem = new WeakHashMap<DetailItem, Component>();
void add(CellChangedListener listener) {
listeners.add(listener);
}
private void fireCellChanged(DetailItem detailItem) {
for (CellChangedListener cellChangedListener : listeners) {
cellChangedListener.changeOn(detailItem);
}
}
private void fireCellChanged() {
for (CellChangedListener cellChangedListener : listeners) {
cellChangedListener.changeOnGlobal();
}
}
Component getAllHours() {
if (allHoursInput == null) {
allHoursInput = buildAllHours();
reloadAllHours();
addListenerIfNeeded(allHoursInput);
}
return allHoursInput;
}
private Component buildAllHours() {
return isGroupingRow() ? new Label() : new Intbox();
}
private void addListenerIfNeeded(Component allHoursComponent) {
if (isGroupingRow()) {
return;
}
Intbox intbox = (Intbox) allHoursComponent;
intbox.addEventListener(Events.ON_CHANGE, new EventListener() {
@Override
public void onEvent(Event event) throws Exception {
fireCellChanged();
}
});
}
private void reloadAllHours() {
if (isGroupingRow()) {
Label label = (Label) allHoursInput;
label.setValue(aggregate.getTotalHours() + "");
} else {
Intbox intbox = (Intbox) allHoursInput;
intbox.setValue(aggregate.getTotalHours());
}
}
Component getFunction() {
if (isGroupingRow()) {
return new Label();
} else {
Combobox combobox = new Combobox();
return combobox;
}
}
Component getNameLabel() {
if (nameLabel == null) {
nameLabel = new Label();
nameLabel.setValue(name);
}
return nameLabel;
}
private String name;
private int level;
private final AggregateOfResourceAllocations aggregate;
private Row(String name, int level,
List<? extends ResourceAllocation<?>> allocations) {
this.name = name;
this.level = level;
this.aggregate = new AggregateOfResourceAllocations(
new ArrayList<ResourceAllocation<?>>(allocations));
}
private Integer getHoursForDetailItem(DetailItem item) {
DateTime startDate = item.getStartDate();
DateTime endDate = item.getEndDate();
return this.aggregate.hoursBetween(startDate.toLocalDate(), endDate
.toLocalDate());
}
Component hoursOnInterval(DetailItem item) {
Component result = isGroupingRow() ? new Label() : new Intbox();
reloadHoursOnInterval(result, item);
componentsByDetailItem.put(item, result);
addListenerIfNeeded(item, result);
return result;
}
private void addListenerIfNeeded(final DetailItem item, Component component) {
if (isGroupingRow()) {
return;
}
component.addEventListener(Events.ON_CHANGE, new EventListener() {
@Override
public void onEvent(Event event) throws Exception {
fireCellChanged(item);
}
});
}
private void reloadHoursOnInterval(Component component, DetailItem item) {
if (isGroupingRow()) {
Label label = (Label) component;
label.setValue(getHoursForDetailItem(item) + "");
} else {
Intbox intbox = (Intbox) component;
intbox.setValue(getHoursForDetailItem(item));
}
}
private boolean isGroupingRow() {
return aggregate.getAllocationsSortedByStartDate().size() > 1;
}
}
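// Note (added, not part of the original source): the grouping row registers a CellChangedListener
// on every detail row via listenTo(...), so when an Intbox in a detail row fires ON_CHANGE the
// grouping row reloads the matching interval cell and its total-hours cell from its own
// AggregateOfResourceAllocations.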
|
package org.eclipse.mylar.internal.context.ui.actions;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.WeakHashMap;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.Platform;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.text.ITextSelection;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.StructuredViewer;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.jface.viewers.ViewerFilter;
import org.eclipse.mylar.context.core.ContextCorePlugin;
import org.eclipse.mylar.context.core.MylarStatusHandler;
import org.eclipse.mylar.context.ui.ContextUiPlugin;
import org.eclipse.mylar.context.ui.InterestFilter;
import org.eclipse.mylar.internal.context.ui.ContextUiImages;
import org.eclipse.mylar.monitor.MylarMonitorPlugin;
import org.eclipse.swt.widgets.Event;
import org.eclipse.ui.IActionDelegate2;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.ISelectionListener;
import org.eclipse.ui.IViewActionDelegate;
import org.eclipse.ui.IViewPart;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.IWorkbenchWindowActionDelegate;
import org.eclipse.ui.PlatformUI;
/**
* Extending this class makes it possible to apply Mylar management to a
* structured view (e.g. to provide interest-based filtering).
*
* @author Mik Kersten
*/
public abstract class AbstractFocusViewAction extends Action implements IViewActionDelegate, IActionDelegate2,
ISelectionListener {
private static final String ACTION_LABEL = "Apply Mylar";
public static final String PREF_ID_PREFIX = "org.eclipse.mylar.ui.interest.filter.";
protected String globalPrefId;
protected IAction initAction = null;
protected final InterestFilter interestFilter;
protected IViewPart viewPart;
protected Map<StructuredViewer, List<ViewerFilter>> previousFilters = new WeakHashMap<StructuredViewer, List<ViewerFilter>>();
private boolean manageViewer = true;
private boolean manageFilters = true;
private boolean manageLinking = false;
private boolean wasLinkingEnabled = false;
private static Map<IViewPart, AbstractFocusViewAction> partMap = new WeakHashMap<IViewPart, AbstractFocusViewAction>();
public static AbstractFocusViewAction getActionForPart(IViewPart part) {
return partMap.get(part);
}
public IViewPart getPartForAction() {
if (viewPart == null) {
if (this instanceof IWorkbenchWindowActionDelegate) {
if (Platform.isRunning()) {
throw new RuntimeException("not supported on IWorkbenchWindowActionDelegate");
}
} else {
throw new RuntimeException("error: viewPart is null");
}
}
return viewPart;
}
public AbstractFocusViewAction(InterestFilter interestFilter, boolean manageViewer, boolean manageFilters,
boolean manageLinking) {
super();
this.interestFilter = interestFilter;
this.manageViewer = manageViewer;
this.manageFilters = manageFilters;
this.manageLinking = manageLinking;
setText(ACTION_LABEL);
setToolTipText(ACTION_LABEL);
setImageDescriptor(ContextUiImages.INTEREST_FILTERING);
}
public void init(IAction action) {
initAction = action;
setChecked(action.isChecked());
}
public void init(IViewPart view) {
String id = view.getSite().getId();
globalPrefId = PREF_ID_PREFIX + id;
viewPart = view;
partMap.put(view, this);
}
public void run(IAction action) {
setChecked(action.isChecked());
valueChanged(action, action.isChecked(), true);
}
/**
* Don't update if the preference has not been initialized.
*/
public void update() {
if (globalPrefId != null) {
update(ContextUiPlugin.getDefault().getPreferenceStore().getBoolean(globalPrefId));
}
}
/**
* This operation is expensive.
*/
public void update(boolean on) {
valueChanged(initAction, on, false);
}
protected void valueChanged(IAction action, final boolean on, boolean store) {
if (PlatformUI.getWorkbench().isClosing()) {
return;
}
boolean wasPaused = ContextCorePlugin.getContextManager().isContextCapturePaused();
try {
if (!wasPaused) {
ContextCorePlugin.getContextManager().setContextCapturePaused(true);
}
setChecked(on);
action.setChecked(on);
if (store && ContextCorePlugin.getDefault() != null) {
ContextUiPlugin.getDefault().getPreferenceStore().setValue(globalPrefId, on);
}
List<StructuredViewer> viewers = getViewers();
for (StructuredViewer viewer : viewers) {
if (viewPart != null && !viewer.getControl().isDisposed() && manageViewer) {
ContextUiPlugin.getDefault().getViewerManager().addManagedViewer(viewer, viewPart);
}
updateInterestFilter(on, viewer);
}
if (manageLinking) {
updateLinking(on);
if (!on && viewers.size() == 1) {
StructuredViewer structuredViewer = viewers.get(0);
if (structuredViewer instanceof TreeViewer) {
((TreeViewer) structuredViewer).collapseAll();
}
}
}
} catch (Throwable t) {
MylarStatusHandler.fail(t, "Could not install viewer manager on: " + globalPrefId, false);
} finally {
if (!wasPaused) {
ContextCorePlugin.getContextManager().setContextCapturePaused(false);
}
}
}
private void updateLinking(boolean on) {
if (on) {
wasLinkingEnabled = isDefaultLinkingEnabled();
setDefaultLinkingEnabled(false);
MylarMonitorPlugin.getDefault().addWindowPostSelectionListener(this);
} else {
MylarMonitorPlugin.getDefault().removeWindowPostSelectionListener(this);
setDefaultLinkingEnabled(wasLinkingEnabled);
}
}
public void selectionChanged(IWorkbenchPart part, ISelection selection) {
if (manageLinking && selection instanceof ITextSelection && part instanceof IEditorPart) {
try {
List<StructuredViewer> viewers = getViewers();
if (viewers.size() == 1) {
StructuredViewer viewer = getViewers().get(0);
ITextSelection textSelection = (ITextSelection) selection;
ISelection toSelect = resolveSelection((IEditorPart) part, textSelection, viewer);
if (toSelect != null) {
ISelection currentSelection = viewer.getSelection();
if (!selection.equals(currentSelection)) {
select(viewer, toSelect);
}
}
}
} catch (Throwable t) {
// ignore, linking failure is not fatal
}
}
}
protected void select(StructuredViewer viewer, ISelection selection) {
viewer.setSelection(selection, true);
}
/**
* Override to provide managed linking
*/
protected ISelection resolveSelection(IEditorPart part, ITextSelection selection, StructuredViewer viewer)
throws CoreException {
return null;
}
/**
* Override to provide managed linking
*/
protected void setDefaultLinkingEnabled(boolean on) {
// ignore
}
/**
* Override to provide managed linking
*/
protected boolean isDefaultLinkingEnabled() {
return false;
}
public void selectionChanged(IAction action, ISelection selection) {
// ignore
}
/**
* Public for testing
*/
public void updateInterestFilter(final boolean on, StructuredViewer viewer) {
if (viewer != null) {
if (on) {
installInterestFilter(viewer);
ContextUiPlugin.getDefault().getViewerManager().addFilteredViewer(viewer);
} else {
ContextUiPlugin.getDefault().getViewerManager().removeFilteredViewer(viewer);
uninstallInterestFilter(viewer);
}
}
}
/**
* Public for testing
*/
public abstract List<StructuredViewer> getViewers();
/**
* @return filters that should not be removed when the interest filter is
* installed
*/
private Set<Class<?>> getPreservedFilters() {
return ContextUiPlugin.getDefault().getPreservedFilterClasses(viewPart.getSite().getId());
}
protected boolean installInterestFilter(StructuredViewer viewer) {
if (viewer == null) {
MylarStatusHandler.log("The viewer to install InterestFilter is null", this);
return false;
} else if (viewer.getControl().isDisposed() && manageViewer) {
// TODO: do this with part listener, not lazily?
return false;
}
try {
viewer.getControl().setRedraw(false);
previousFilters.put(viewer, Arrays.asList(viewer.getFilters()));
if (viewPart != null && manageFilters) {
Set<Class<?>> excludedFilters = getPreservedFilters();
for (ViewerFilter filter : previousFilters.get(viewer)) {
if (!excludedFilters.contains(filter.getClass())) {
try {
viewer.removeFilter(filter);
} catch (Throwable t) {
MylarStatusHandler.fail(t, "Failed to remove filter: " + filter, false);
}
}
}
}
viewer.addFilter(interestFilter);
if (viewer instanceof TreeViewer) {
((TreeViewer) viewer).expandAll();
}
viewer.getControl().setRedraw(true);
return true;
} catch (Throwable t) {
t.printStackTrace();
MylarStatusHandler.fail(t, "Could not install viewer filter on: " + globalPrefId, false);
}
return false;
}
protected void uninstallInterestFilter(StructuredViewer viewer) {
if (viewer == null) {
MylarStatusHandler.log("Could not uninstall interest filter", this);
return;
} else if (viewer.getControl().isDisposed()) {
// TODO: do this with part listener, not lazily?
ContextUiPlugin.getDefault().getViewerManager().removeManagedViewer(viewer, viewPart);
return;
}
viewer.getControl().setRedraw(false);
if (viewPart != null && manageFilters) {
Set<Class<?>> excludedFilters = getPreservedFilters();
if (previousFilters.containsKey(viewer)) {
for (ViewerFilter filter : previousFilters.get(viewer)) {
if (!excludedFilters.contains(filter.getClass())) {
try {
viewer.addFilter(filter);
} catch (Throwable t) {
MylarStatusHandler.fail(t, "Failed to remove filter: " + filter, false);
}
}
}
previousFilters.remove(viewer);
}
}
for (ViewerFilter filter : Arrays.asList(viewer.getFilters())) {
if (filter instanceof InterestFilter) {
viewer.removeFilter(interestFilter);
}
}
viewer.getControl().setRedraw(true);
}
public void dispose() {
partMap.remove(getPartForAction());
if (viewPart != null && !PlatformUI.getWorkbench().isClosing()) {
for (StructuredViewer viewer : getViewers()) {
ContextUiPlugin.getDefault().getViewerManager().removeManagedViewer(viewer, viewPart);
}
}
MylarMonitorPlugin.getDefault().removeWindowPostSelectionListener(this);
}
public void runWithEvent(IAction action, Event event) {
run(action);
}
public String getGlobalPrefId() {
return globalPrefId;
}
/**
* For testing.
*/
public InterestFilter getInterestFilter() {
return interestFilter;
}
}
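// Illustrative sketch (added, not part of the original source): a concrete focus action only
// needs to supply the viewers it manages; view and field names below are hypothetical:
//
//   public class FocusSampleViewAction extends AbstractFocusViewAction {
//       public FocusSampleViewAction() {
//           super(new InterestFilter(), true, true, false);
//       }
//       @Override
//       public List<StructuredViewer> getViewers() {
//           return Collections.singletonList(sampleViewer);
//       }
//   }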
|
package org.eclipse.mylyn.internal.jira.core.service.web.rss;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Locale;
import org.apache.commons.lang.StringEscapeUtils;
import org.eclipse.mylyn.internal.jira.core.model.Attachment;
import org.eclipse.mylyn.internal.jira.core.model.Comment;
import org.eclipse.mylyn.internal.jira.core.model.Component;
import org.eclipse.mylyn.internal.jira.core.model.CustomField;
import org.eclipse.mylyn.internal.jira.core.model.Issue;
import org.eclipse.mylyn.internal.jira.core.model.IssueLink;
import org.eclipse.mylyn.internal.jira.core.model.Project;
import org.eclipse.mylyn.internal.jira.core.model.SecurityLevel;
import org.eclipse.mylyn.internal.jira.core.model.Subtask;
import org.eclipse.mylyn.internal.jira.core.model.Version;
import org.eclipse.mylyn.internal.jira.core.model.filter.IssueCollector;
import org.eclipse.mylyn.internal.jira.core.service.JiraClient;
import org.eclipse.mylyn.monitor.core.StatusHandler;
import org.xml.sax.Attributes;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
public class RssContentHandler extends DefaultHandler {
private static final SimpleDateFormat XML_DATE_FORMAT = new SimpleDateFormat("E, dd MMM yyyy HH:mm:ss Z (zz)", Locale.US); //$NON-NLS-1$
private static final SimpleDateFormat XML_DUE_DATE_FORMAT = new SimpleDateFormat("E, dd MMM yyyy HH:mm:ss", Locale.US); //$NON-NLS-1$
private static final String CREATED_ATTR = "created"; //$NON-NLS-1$
private static final String LEVEL_ATTR = "level"; //$NON-NLS-1$
private static final String AUTHOR_ATTR = "author"; //$NON-NLS-1$
private static final String ID_ATTR = "id"; //$NON-NLS-1$
private static final String KEY_ATTR = "key"; //$NON-NLS-1$
private static final String USERNAME_ATTR = "username"; //$NON-NLS-1$
private static final String SECONDS_ATTR = "seconds"; //$NON-NLS-1$
private static final String NAME_ATTR = "name"; //$NON-NLS-1$
private static final String SIZE_ATTR = "size"; //$NON-NLS-1$
private static final String RSS = "rss"; //$NON-NLS-1$
private static final String CHANNEL = "channel"; //$NON-NLS-1$
private static final String ITEM = "item"; //$NON-NLS-1$
private static final String COMMENTS = "comments"; //$NON-NLS-1$
private static final String COMMENT = "comment"; //$NON-NLS-1$
private static final String VOTES = "votes"; //$NON-NLS-1$
private static final String ORIGINAL_ESTIMATE = "timeoriginalestimate"; //$NON-NLS-1$
private static final String CURRENT_ESTIMATE = "timeestimate"; //$NON-NLS-1$
private static final String ACTUAL = "timespent"; //$NON-NLS-1$
private static final String SUBTASKS = "subtasks"; //$NON-NLS-1$
private static final String SUBTASK = "subtask"; //$NON-NLS-1$
private static final String ATTACHMENTS = "attachments"; //$NON-NLS-1$
private static final String ATTACHMENT = "attachment"; //$NON-NLS-1$
private static final String DUE = "due"; //$NON-NLS-1$
private static final String COMPONENT = "component"; //$NON-NLS-1$
private static final String FIX_VERSION = "fixVersion"; //$NON-NLS-1$
private static final String VERSION = "version"; //$NON-NLS-1$
private static final String UPDATED = "updated"; //$NON-NLS-1$
private static final String CREATED = "created"; //$NON-NLS-1$
private static final String REPORTER = "reporter"; //$NON-NLS-1$
private static final String ASSIGNEE = "assignee"; //$NON-NLS-1$
private static final String RESOLUTION = "resolution"; //$NON-NLS-1$
private static final String STATUS = "status"; //$NON-NLS-1$
private static final String PRIORITY = "priority"; //$NON-NLS-1$
private static final String TYPE = "type"; //$NON-NLS-1$
private static final String SUMMARY = "summary"; //$NON-NLS-1$
private static final String KEY = "key"; //$NON-NLS-1$
private static final String PARENT = "parent"; //$NON-NLS-1$
private static final String ENVIRONMENT = "environment"; //$NON-NLS-1$
private static final String DESCRIPTION = "description"; //$NON-NLS-1$
private static final String LINK = "link"; //$NON-NLS-1$
private static final String TITLE = "title"; //$NON-NLS-1$
private static final String CUSTOM_FIELDS = "customfields"; //$NON-NLS-1$
private static final String CUSTOM_FIELD = "customfield"; //$NON-NLS-1$
private static final String CUSTOM_FIELD_NAME = "customfieldname"; //$NON-NLS-1$
private static final String CUSTOM_FIELD_VALUES = "customfieldvalues"; //$NON-NLS-1$
private static final String CUSTOM_FIELD_VALUE = "customfieldvalue"; //$NON-NLS-1$
private static final String ISSUE_LINKS = "issuelinks"; //$NON-NLS-1$
private static final String ISSUE_LINK_TYPE = "issuelinktype"; //$NON-NLS-1$
private static final String ISSUE_LINK_NAME = "name"; //$NON-NLS-1$
private static final String INWARD_LINKS = "inwardlinks"; //$NON-NLS-1$
private static final String OUTWARD_LINKS = "outwardlinks"; //$NON-NLS-1$
private static final String ISSUE_LINK = "issuelink"; //$NON-NLS-1$
private static final String ISSUE_KEY = "issuekey"; //$NON-NLS-1$
	private static final String SECURITY = "security"; //$NON-NLS-1$
private static final int START = 0;
private static final int LOOKING_FOR_CHANNEL = 1;
private static final int LOOKING_FOR_ITEM = 2;
private static final int IN_ITEM = 3;
private static final int IN_COMMENTS = 4;
private static final int IN_CUSTOM_FIELDS = 5;
private static final int IN_CUSTOM_FIELD = 6;
private static final int IN_CUSTOM_FIELD_VALUES = 7;
private static final int IN_ISSUE_LINKS = 8;
private static final int IN_ISSUE_LINK_TYPE = 9;
private static final int IN_XWARDS_LINKS = 10;
private static final int IN_XWARDS_ISSUE_LINK = 12;
private static final int IN_ATTACHMENTS = 14;
private static final int IN_CUSTOM_FIELD_NAME = 15;
private static final int IN_CUSTOM_FIELD_VALUE = 16;
private static final int IN_SUBTASKS = 17;
int state = START;
private StringBuffer currentElementText;
private final JiraClient client;
private final IssueCollector collector;
private Issue currentIssue;
private String commentAuthor;
private String commentLevel;
private Date commentDate;
private ArrayList<Comment> currentComments = new ArrayList<Comment>();
private ArrayList<Version> currentFixVersions = null;
private ArrayList<Version> currentReportedVersions = null;
private ArrayList<Component> currentComponents = null;
private ArrayList<Attachment> currentAttachments = new ArrayList<Attachment>();
private ArrayList<CustomField> currentCustomFields = new ArrayList<CustomField>();
private String currentSubtaskId;
private ArrayList<Subtask> currentSubtasks = new ArrayList<Subtask>();
private String currentIssueLinkTypeId;
private String currentIssueLinkTypeName;
private String currentIssueLinkInwardDescription;
private String currentIssueLinkOutwardDescription;
private String currentIssueLinkIssueId;
private ArrayList<IssueLink> currentIssueLinks = new ArrayList<IssueLink>();
private String customFieldId;
private String customFieldKey;
private String customFieldName;
private ArrayList<String> customFieldValues = new ArrayList<String>();
private String attachmentId;
private String attachmentName;
private long attachmentSize;
private String attachmentAuthor;
private Date attachmentCreated;
/**
* Creates a new RSS reader that will create issues from the RSS information by querying the local Jira Server for
* any missing information. Issues will be published to <code>collector</code> as they are read from the stream.
*
* @param client
	 *            Jira server whose issues we are listing. This can either be a locally cached Jira server or a
	 *            connection to a live instance.
* @param collector
	 *            Collector that will process the issues as they are read from the RSS feed.
* @param baseUrl
* the base URL of the repository
*/
public RssContentHandler(JiraClient client, IssueCollector collector, String baseUrl) {
this.client = client;
this.collector = collector;
}
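	/*
	 * Minimal usage sketch (assumed calling code, not part of this handler; variable
	 * names are hypothetical):
	 *
	 *   SAXParser parser = SAXParserFactory.newInstance().newSAXParser();
	 *   parser.parse(rssStream, new RssContentHandler(client, collector, baseUrl));
	 *
	 * Issues are pushed to the collector as each <item> element is closed.
	 */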
@Override
public void startDocument() throws SAXException {
state = START;
currentElementText = new StringBuffer(256);
collector.start();
}
@Override
public void endDocument() throws SAXException {
		if (state != START) {
			// the feed ended in an unexpected state; ignore it and finish anyway
		}
this.collector.done();
// remove unused buffers
currentElementText = null;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes) throws SAXException {
currentElementText.setLength(0);
if (collector.isCancelled()) {
throw new ParseCancelledException("User canceled operation");
}
switch (state) {
case START:
if (RSS.equals(localName)) {
state = LOOKING_FOR_CHANNEL;
}
break;
case LOOKING_FOR_CHANNEL:
if (CHANNEL.equals(localName)) {
state = LOOKING_FOR_ITEM;
}
break;
case LOOKING_FOR_ITEM:
if (ITEM.equals(localName)) {
state = IN_ITEM;
currentIssue = new Issue();
}
break;
case IN_ITEM:
if (KEY.equals(localName)) {
currentIssue.setId(attributes.getValue(ID_ATTR));
} else if (PARENT.equals(localName)) {
currentIssue.setParentId(attributes.getValue(ID_ATTR));
} else if (TYPE.equals(localName)) {
currentIssue.setType(client.getIssueTypeById(attributes.getValue(ID_ATTR)));
} else if (PRIORITY.equals(localName)) {
currentIssue.setPriority(client.getPriorityById(attributes.getValue(ID_ATTR)));
} else if (STATUS.equals(localName)) {
currentIssue.setStatus(client.getStatusById(attributes.getValue(ID_ATTR)));
} else if (ASSIGNEE.equals(localName)) {
String assigneeName = attributes.getValue(USERNAME_ATTR);
currentIssue.setAssignee(assigneeName);
} else if (REPORTER.equals(localName)) {
String reporterName = attributes.getValue(USERNAME_ATTR);
currentIssue.setReporter(reporterName);
} else if (RESOLUTION.equals(localName)) {
String resolutionId = attributes.getValue(ID_ATTR);
currentIssue.setResolution(resolutionId != null ? client.getResolutionById(resolutionId) : null);
} else if (ORIGINAL_ESTIMATE.equals(localName)) {
currentIssue.setInitialEstimate(Long.parseLong(attributes.getValue(SECONDS_ATTR)));
} else if (CURRENT_ESTIMATE.equals(localName)) {
currentIssue.setEstimate(Long.parseLong(attributes.getValue(SECONDS_ATTR)));
} else if (ACTUAL.equals(localName)) {
currentIssue.setActual(Long.parseLong(attributes.getValue(SECONDS_ATTR)));
} else if (SECURITY.equals(localName)) {
SecurityLevel securityLevel = new SecurityLevel();
securityLevel.setId(attributes.getValue(ID_ATTR));
currentIssue.setSecurityLevel(securityLevel);
}
if (COMMENTS.equals(localName)) {
state = IN_COMMENTS;
} else if (ISSUE_LINKS.equals(localName)) {
state = IN_ISSUE_LINKS;
} else if (SUBTASKS.equals(localName)) {
state = IN_SUBTASKS;
} else if (CUSTOM_FIELDS.equals(localName)) {
state = IN_CUSTOM_FIELDS;
} else if (ATTACHMENTS.equals(localName)) {
state = IN_ATTACHMENTS;
}
break;
case IN_COMMENTS:
if (COMMENT.equals(localName)) {
commentAuthor = attributes.getValue(AUTHOR_ATTR);
commentLevel = attributes.getValue(LEVEL_ATTR);
commentDate = convertToDate(attributes.getValue(CREATED_ATTR));
}
break;
case IN_ISSUE_LINKS:
if (ISSUE_LINK_TYPE.equals(localName)) {
state = IN_ISSUE_LINK_TYPE;
currentIssueLinkTypeId = attributes.getValue(ID_ATTR);
}
break;
case IN_ISSUE_LINK_TYPE:
if (ISSUE_LINK_NAME.equals(localName)) {
} else if (INWARD_LINKS.equals(localName)) {
currentIssueLinkInwardDescription = attributes.getValue(DESCRIPTION);
state = IN_XWARDS_LINKS;
} else if (OUTWARD_LINKS.equals(localName)) {
currentIssueLinkOutwardDescription = attributes.getValue(DESCRIPTION);
state = IN_XWARDS_LINKS;
}
break;
case IN_XWARDS_LINKS:
if (ISSUE_LINK.equals(localName)) {
state = IN_XWARDS_ISSUE_LINK;
}
break;
case IN_XWARDS_ISSUE_LINK:
if (ISSUE_KEY.equals(localName)) {
currentIssueLinkIssueId = attributes.getValue(ID_ATTR);
}
break;
case IN_CUSTOM_FIELDS:
if (CUSTOM_FIELD.equals(localName)) {
customFieldId = attributes.getValue(ID_ATTR);
customFieldKey = attributes.getValue(KEY_ATTR);
state = IN_CUSTOM_FIELD;
}
break;
case IN_CUSTOM_FIELD:
if (CUSTOM_FIELD_NAME.equals(localName)) {
state = IN_CUSTOM_FIELD_NAME;
} else if (CUSTOM_FIELD_VALUES.equals(localName)) {
state = IN_CUSTOM_FIELD_VALUES;
}
break;
case IN_CUSTOM_FIELD_VALUES:
if (CUSTOM_FIELD_VALUE.equals(localName)) {
state = IN_CUSTOM_FIELD_VALUE;
}
break;
case IN_ATTACHMENTS:
if (ATTACHMENT.equals(localName)) {
attachmentId = attributes.getValue(ID_ATTR);
attachmentName = attributes.getValue(NAME_ATTR);
attachmentSize = Long.parseLong(attributes.getValue(SIZE_ATTR));
attachmentAuthor = attributes.getValue(AUTHOR_ATTR);
attachmentCreated = convertToDate(attributes.getValue(CREATED_ATTR));
}
break;
case IN_SUBTASKS:
if (SUBTASK.equals(localName)) {
currentSubtaskId = attributes.getValue(ID_ATTR);
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
switch (state) {
case IN_SUBTASKS:
if (SUBTASK.equals(localName)) {
currentSubtasks.add(new Subtask(currentSubtaskId, getCurrentElementText()));
currentSubtaskId = null;
} else if (SUBTASKS.equals(localName)) {
state = IN_ITEM;
}
break;
case IN_ATTACHMENTS:
if (ATTACHMENTS.equals(localName)) {
state = IN_ITEM;
} else if (ATTACHMENT.equals(localName)) {
Attachment attachment = new Attachment(attachmentId, attachmentName, attachmentSize, attachmentAuthor,
attachmentCreated);
currentAttachments.add(attachment);
}
break;
case IN_CUSTOM_FIELD_VALUE:
if (CUSTOM_FIELD_VALUE.equals(localName)) {
customFieldValues.add(getCurrentElementText());
state = IN_CUSTOM_FIELD_VALUES;
}
break;
case IN_CUSTOM_FIELD_VALUES:
if (CUSTOM_FIELD_VALUES.equals(localName)) {
if (customFieldValues.size() == 0) {
customFieldValues.add(getCurrentElementText());
}
state = IN_CUSTOM_FIELD;
}
break;
case IN_CUSTOM_FIELD_NAME:
if (CUSTOM_FIELD_NAME.equals(localName)) {
customFieldName = getCurrentElementText();
state = IN_CUSTOM_FIELD;
}
break;
case IN_CUSTOM_FIELD:
if (CUSTOM_FIELD.equals(localName)) {
currentCustomFields.add(new CustomField(customFieldId, customFieldKey, customFieldName,
customFieldValues));
customFieldId = null;
customFieldKey = null;
customFieldName = null;
customFieldValues.clear();
state = IN_CUSTOM_FIELDS;
}
break;
case IN_CUSTOM_FIELDS:
if (CUSTOM_FIELDS.equals(localName)) {
state = IN_ITEM;
}
break;
case IN_XWARDS_ISSUE_LINK:
if (ISSUE_LINK.equals(localName)) {
String key = getCurrentElementText().trim();
IssueLink link = new IssueLink(currentIssueLinkIssueId, key, currentIssueLinkTypeId,
currentIssueLinkTypeName, currentIssueLinkInwardDescription, currentIssueLinkOutwardDescription);
currentIssueLinks.add(link);
currentIssueLinkIssueId = null;
state = IN_XWARDS_LINKS;
}
break;
case IN_XWARDS_LINKS:
if (OUTWARD_LINKS.equals(localName) || INWARD_LINKS.equals(localName)) {
state = IN_ISSUE_LINK_TYPE;
currentIssueLinkOutwardDescription = null;
currentIssueLinkInwardDescription = null;
}
break;
case IN_ISSUE_LINK_TYPE:
if (ISSUE_LINK_TYPE.equals(localName)) {
currentIssueLinkTypeName = null;
state = IN_ISSUE_LINKS;
} else if (ISSUE_LINK_NAME.equals(localName)) {
currentIssueLinkTypeName = getCurrentElementText().trim();
}
break;
case IN_ISSUE_LINKS:
if (ISSUE_LINKS.equals(localName)) {
state = IN_ITEM;
}
break;
case IN_COMMENTS:
if (COMMENTS.equals(localName)) {
state = IN_ITEM;
} else if (COMMENT.equals(localName)) {
Comment comment = new Comment(getCurrentElementTextEscapeHtml(), commentAuthor, commentLevel, commentDate);
currentComments.add(comment);
}
break;
case IN_ITEM:
if (CHANNEL.equals(localName)) {
state = LOOKING_FOR_CHANNEL;
} else if (ITEM.equals(localName)) {
if (currentReportedVersions != null) {
currentIssue.setReportedVersions(currentReportedVersions.toArray(new Version[currentReportedVersions.size()]));
}
if (currentFixVersions != null) {
currentIssue.setFixVersions(currentFixVersions.toArray(new Version[currentFixVersions.size()]));
}
if (currentComponents != null) {
currentIssue.setComponents(currentComponents.toArray(new Component[currentComponents.size()]));
}
currentIssue.setComments(currentComments.toArray(new Comment[currentComments.size()]));
currentIssue.setAttachments(currentAttachments.toArray(new Attachment[currentAttachments.size()]));
currentIssue.setCustomFields(currentCustomFields.toArray(new CustomField[currentCustomFields.size()]));
currentIssue.setSubtasks(currentSubtasks.toArray(new Subtask[currentSubtasks.size()]));
currentIssue.setIssueLinks(currentIssueLinks.toArray(new IssueLink[currentIssueLinks.size()]));
collector.collectIssue(currentIssue);
currentIssue = null;
currentIssueLinks.clear();
currentSubtasks.clear();
currentCustomFields.clear();
currentAttachments.clear();
currentComments.clear();
currentFixVersions = null;
currentReportedVersions = null;
currentComponents = null;
state = LOOKING_FOR_ITEM;
} else if (TITLE.equals(localName)) {
} else if (LINK.equals(localName)) {
} else if (DESCRIPTION.equals(localName)) {
currentIssue.setDescription(getCurrentElementTextEscapeHtml());
} else if (ENVIRONMENT.equals(localName)) {
currentIssue.setEnvironment(getCurrentElementText());
} else if (KEY.equals(localName)) {
String key = getCurrentElementText();
currentIssue.setKey(key);
currentIssue.setUrl(client.getBaseUrl() + "/browse/" + key);
					// TODO it is dodgy to assume the project from the issue key
String projectKey = key.substring(0, key.lastIndexOf('-'));
Project project = client.getProjectByKey(projectKey);
if (project == null) {
//throw new SAXException("No project with key '" + projectKey + "' found");
break;
}
currentIssue.setProject(project);
} else if (PARENT.equals(localName)) {
currentIssue.setParentKey(getCurrentElementText());
} else if (SUMMARY.equals(localName)) {
currentIssue.setSummary(getCurrentElementText());
} else if (CREATED.equals(localName)) {
currentIssue.setCreated(convertToDate(getCurrentElementText()));
} else if (UPDATED.equals(localName)) {
currentIssue.setUpdated(convertToDate(getCurrentElementText()));
} else if (VERSION.equals(localName)) {
if (currentIssue.getProject() == null) {
//throw new SAXException("Issue " + currentIssue.getId() + " does not have a valid project");
break;
}
Version version = currentIssue.getProject().getVersion(getCurrentElementText());
// TODO add better handling of unknown versions
if (version != null) {
// throw new SAXException("No version with name '" + getCurrentElementText() + "' found");
if (currentReportedVersions == null) {
currentReportedVersions = new ArrayList<Version>();
}
currentReportedVersions.add(version);
}
} else if (FIX_VERSION.equals(localName)) {
if (currentIssue.getProject() == null) {
//throw new SAXException("Issue " + currentIssue.getId() + " does not have a valid project");
break;
}
Version version = currentIssue.getProject().getVersion(getCurrentElementText());
// TODO add better handling of unknown versions
if (version != null) {
// throw new SAXException("No version with name '" + getCurrentElementText() + "' found");
if (currentFixVersions == null) {
currentFixVersions = new ArrayList<Version>();
}
currentFixVersions.add(version);
}
} else if (COMPONENT.equals(localName)) {
if (currentIssue.getProject() == null) {
//throw new SAXException("Issue " + currentIssue.getId() + " does not have a valid project");
break;
}
Component component = currentIssue.getProject().getComponent(getCurrentElementText());
// TODO add better handling of unknown components
if (component != null) {
// throw new SAXException("No component with name '" + getCurrentElementText() + "' found");
if (currentComponents == null) {
currentComponents = new ArrayList<Component>();
}
currentComponents.add(component);
}
} else if (DUE.equals(localName)) {
currentIssue.setDue(convertToDueDate(getCurrentElementText()));
} else if (VOTES.equals(localName)) {
if (getCurrentElementText().length() > 0) {
try {
currentIssue.setVotes(Integer.parseInt(getCurrentElementText()));
} catch (NumberFormatException e) {
StatusHandler.log(e, "Error while parsing number of votes");
}
}
} else if (SECURITY.equals(localName)) {
SecurityLevel securityLevel = currentIssue.getSecurityLevel();
if (securityLevel != null) {
securityLevel.setName(getCurrentElementText());
}
} else if (TYPE.equals(localName)) {
} else if (PRIORITY.equals(localName)) {
} else if (STATUS.equals(localName)) {
} else if (ASSIGNEE.equals(localName)) {
} else if (REPORTER.equals(localName)) {
} else if (RESOLUTION.equals(localName)) {
} else if (ORIGINAL_ESTIMATE.equals(localName)) {
} else if (CURRENT_ESTIMATE.equals(localName)) {
} else if (ACTUAL.equals(localName)) {
} else {
// ignore
}
break;
case LOOKING_FOR_ITEM:
if (CHANNEL.equals(localName)) {
state = LOOKING_FOR_CHANNEL;
}
break;
case LOOKING_FOR_CHANNEL:
if (RSS.equals(localName)) {
state = START;
}
break;
}
}
@Override
public void characters(char[] ch, int start, int length) throws SAXException {
currentElementText.append(ch, start, length);
}
private static Date convertToDate(String value) {
if (value == null || value.length() == 0) {
return null;
}
try {
return XML_DATE_FORMAT.parse(value);
} catch (ParseException e) {
StatusHandler.log(e, "Error while parsing date string " + value);
return null;
}
}
private static Date convertToDueDate(String value) {
if (value == null || value.length() == 0) {
return null;
}
try {
return XML_DUE_DATE_FORMAT.parse(value);
} catch (ParseException e) {
StatusHandler.log(e, "Error while parsing due date string " + value);
return null;
}
}
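	/*
	 * Example (hypothetical date strings, matching the two formats declared at
	 * the top of this class):
	 *
	 *   Date created = convertToDate("Mon, 01 Jan 2007 10:00:00 +0100 (CET)");
	 *   Date due = convertToDueDate("Mon, 01 Jan 2007 10:00:00");
	 */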
private String getCurrentElementText() {
String unescaped = currentElementText.toString();
unescaped = StringEscapeUtils.unescapeXml(unescaped);
return unescaped;
}
private String getCurrentElementTextEscapeHtml() {
String unescaped = currentElementText.toString();
unescaped = unescaped.replaceAll("\n", "");
unescaped = unescaped.replaceAll("<br/>", "\n");
unescaped = unescaped.replaceAll(" ", " ");
unescaped = StringEscapeUtils.unescapeXml(unescaped);
return unescaped;
}
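	/*
	 * Example (hypothetical element text): "first&nbsp;line<br/>second line" is
	 * returned as "first line" + newline + "second line"; any remaining XML
	 * entities are unescaped by StringEscapeUtils.
	 */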
}
|
package org.pocketcampus.plugin.freeroom.server;
import static org.pocketcampus.platform.launcher.server.PCServerConfig.PC_SRV_CONFIG;
import java.net.HttpURLConnection;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.thrift.TException;
import org.pocketcampus.platform.sdk.server.database.ConnectionManager;
import org.pocketcampus.platform.sdk.server.database.handlers.exceptions.ServerException;
import org.pocketcampus.plugin.freeroom.server.exchange.ExchangeServiceImpl;
import org.pocketcampus.plugin.freeroom.server.utils.FetchRoomsDetails;
import org.pocketcampus.plugin.freeroom.server.utils.OccupancySorted;
import org.pocketcampus.plugin.freeroom.server.utils.Utils;
import org.pocketcampus.plugin.freeroom.shared.ActualOccupation;
import org.pocketcampus.plugin.freeroom.shared.AutoCompleteReply;
import org.pocketcampus.plugin.freeroom.shared.AutoCompleteRequest;
import org.pocketcampus.plugin.freeroom.shared.FRPeriod;
import org.pocketcampus.plugin.freeroom.shared.FRReply;
import org.pocketcampus.plugin.freeroom.shared.FRRequest;
import org.pocketcampus.plugin.freeroom.shared.FRRoom;
import org.pocketcampus.plugin.freeroom.shared.FreeRoomService;
import org.pocketcampus.plugin.freeroom.shared.ImWorkingReply;
import org.pocketcampus.plugin.freeroom.shared.ImWorkingRequest;
import org.pocketcampus.plugin.freeroom.shared.LogMessage;
import org.pocketcampus.plugin.freeroom.shared.Occupancy;
import org.pocketcampus.plugin.freeroom.shared.WhoIsWorkingReply;
import org.pocketcampus.plugin.freeroom.shared.WhoIsWorkingRequest;
import org.pocketcampus.plugin.freeroom.shared.WorkingOccupancy;
import org.pocketcampus.plugin.freeroom.shared.utils.FRTimes;
/**
* The actual implementation of the server side of the FreeRoom Plugin.
*
* It responds to different types of request from the clients.
*
* @author FreeRoom Project Team - Julien WEBER <julien.weber@epfl.ch> and
* Valentin MINDER <valentin.minder@epfl.ch>
*
*/
public class FreeRoomServiceImpl implements FreeRoomService.Iface {
private final int LIMIT_AUTOCOMPLETE = 50;
private ConnectionManager connMgr;
private ExchangeServiceImpl mExchangeService;
private Logger logger = Logger.getLogger(FreeRoomServiceImpl.class
.getName());
private SimpleDateFormat dateLogFormat = new SimpleDateFormat(
"MMM dd,yyyy HH:mm");
// be careful when changing this, it might lead to invalid data already
// stored !
// this is what is used to differentiate a room from a student occupation in
// the DB.
public enum OCCUPANCY_TYPE {
ROOM, USER;
};
	// used to differentiate Android logs from server logs.
private enum LOG_SIDE {
ANDROID, SERVER;
};
public FreeRoomServiceImpl() {
System.out.println("Starting FreeRoom plugin server ... V2");
logger.setLevel(Level.INFO);
try {
connMgr = new ConnectionManager(PC_SRV_CONFIG.getString("DB_URL")
+ "?allowMultiQueries=true",
PC_SRV_CONFIG.getString("DB_USERNAME"),
PC_SRV_CONFIG.getString("DB_PASSWORD"));
} catch (ServerException e) {
log(LOG_SIDE.SERVER, Level.SEVERE,
"Server cannot connect to the database");
e.printStackTrace();
}
mExchangeService = new ExchangeServiceImpl(
PC_SRV_CONFIG.getString("DB_URL") + "?allowMultiQueries=true",
PC_SRV_CONFIG.getString("DB_USERNAME"),
PC_SRV_CONFIG.getString("DB_PASSWORD"), this);
// update ewa : should be done periodically...
boolean updateEWA = false;
if (updateEWA) {
if (mExchangeService.updateEWAOccupancy()) {
System.out.println("EWA data succesfully updated!");
} else {
System.err.println("EWA data couldn't be completely loaded!");
}
}
boolean updateRoomsDetails = false;
if (updateRoomsDetails) {
FetchRoomsDetails details = new FetchRoomsDetails(
PC_SRV_CONFIG.getString("DB_URL")
+ "?allowMultiQueries=true",
PC_SRV_CONFIG.getString("DB_USERNAME"),
PC_SRV_CONFIG.getString("DB_PASSWORD"));
System.out.println(details.fetchRoomsIntoDB()
+ " rooms inserted/updated");
}
}
/**
* Logging function, time of the log will be set to the current timestamp.
*
* @param type
	 *            Indicates where the log comes from (e.g. Android, server,
	 *            ...)
* @param level
* Level of the bug (e.g Level.SEVERE, Level.WARNING ...)
* @param message
* Content of the logging message
*/
private void log(LOG_SIDE type, Level level, String message) {
log(type, level, message, System.currentTimeMillis());
}
/**
* Logging function.
*
* @param type
	 *            Indicates where the log comes from (e.g. Android, server,
	 *            ...)
* @param level
* Level of the bug (e.g Level.SEVERE, Level.WARNING ...)
* @param message
* Content of the logging message
* @param timestamp
	 *            The time of the bug, which might be different from the time
	 *            when this method is called because log messages can come
	 *            from various devices (e.g. Android)
*/
private void log(LOG_SIDE type, Level level, String message, long timestamp) {
logger.log(level,
"[" + type.toString() + "] " + dateLogFormat.format(timestamp)
+ " : " + message);
}
/**
	 * This method's job is to ensure the data are stored in a proper way.
	 * Whenever you need to insert an occupancy you should call this one. The
	 * start of a user occupancy should be a full hour (e.g. 10:00). Timestamps
	 * may be modified before insertion in the following ways: seconds and
	 * milliseconds are set to 0, and user occupancies are rounded to a full hour.
*
* @param period
* The period of the occupancy
* @param type
* Type of the occupancy (for instance user or room occupancy)
* @param room
	 *            The room; the object has to contain the UID
* @return true if the occupancy has been well inserted, false otherwise.
*/
public boolean insertOccupancy(FRPeriod period, OCCUPANCY_TYPE type,
FRRoom room, String hash) {
// putting seconds and milliseconds to zero
period.setTimeStampStart(Utils.roundSAndMSToZero(period
.getTimeStampStart()));
period.setTimeStampEnd(Utils.roundSAndMSToZero(period.getTimeStampEnd()));
boolean allowInsert = true;
/**
* the previous room is useful in case of an update. If we have to
* update the user occupancy, we need to know which room will be
* replaced to adjust its count field
*/
String prevRoom = null;
if (type == OCCUPANCY_TYPE.USER) {
			// round the user occupancy start down to the nearest half hour
period.setTimeStampStart(Utils.roundToNearestHalfHourBefore(period
.getTimeStampStart()));
prevRoom = checkMultipleSubmissionUserOccupancy(period, room, hash);
}
if (allowInsert) {
boolean inserted = insertAndCheckOccupancyRoom(period, room, type,
hash, prevRoom);
log(LOG_SIDE.SERVER, Level.INFO,
"Inserting occupancy " + type.toString() + " for room "
+ room.getDoorCode() + " : " + inserted);
return inserted;
} else {
log(LOG_SIDE.SERVER,
Level.WARNING,
"Client already said he was working in "
+ room.getDoorCode());
return false;
}
}
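	/*
	 * Minimal usage sketch (hypothetical room, uid and timestamps; the
	 * constructor signatures mirror the ones used further down in this class):
	 *
	 *   FRRoom room = new FRRoom("BC 07", "room-uid-1234");
	 *   long start = ...; // some timestamp in ms
	 *   FRPeriod period = new FRPeriod(start, start + 2 * Utils.ONE_HOUR_MS, false);
	 *   boolean stored = insertOccupancy(period, OCCUPANCY_TYPE.ROOM, room, null);
	 */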
/**
* This method checks whether the user has already submitted something for
* the same period, which is not allowed.
*
* @param period
* When the user submits its occupancy
* @param room
* The room in which the user occupancy will be counted
* @param hash
	 *            The hash must be unique for each user and shouldn't depend on
	 *            time.
	 * @return the uid of the room previously submitted by this user for the
	 *         same period, or null if there is none and the occupancy can be
	 *         stored.
*/
	// TODO eventually do not use exact timestamps but allow a margin even in
	// queries?
private String checkMultipleSubmissionUserOccupancy(FRPeriod period,
FRRoom room, String hash) {
long tsStart = period.getTimeStampStart();
String checkRequest = "SELECT COUNT(*) AS count, co.uid "
+ "FROM `fr-checkOccupancy` co "
+ "WHERE co.timestampStart = ? AND hash = ?";
Connection connectBDD;
try {
connectBDD = connMgr.getConnection();
PreparedStatement checkQuery = connectBDD
.prepareStatement(checkRequest);
checkQuery.setLong(1, tsStart);
checkQuery.setString(2, hash);
ResultSet checkResult = checkQuery.executeQuery();
if (checkResult.next()) {
int count = checkResult.getInt("count");
if (count == 0) {
return null;
} else {
return checkResult.getString("uid");
}
} else {
return null;
}
} catch (SQLException e) {
log(LOG_SIDE.SERVER,
Level.SEVERE,
"SQL error when checking multiple submissions of user occupancy start = "
+ period.getTimeStampStart() + " end = "
+ period.getTimeStampEnd() + " uid = "
+ room.getUid());
e.printStackTrace();
return null;
}
}
/**
	 * Insert an occupancy in the database. It checks that there are no
	 * overlaps between room occupancies.
*
* @param period
* The period of the occupancy
* @param room
* The room of the occupancy
* @param typeToInsert
* Specify the type of occupancy (USER, ROOM)
* @param hash
* The unique hash for each user, used to store an entry in the
* checkOccupancy table to avoid multiple submissions for the
	 *            same period from a user
* @param prevRoom
* The uid of the previous room stored in the checkOccupancy
* table (if one), otherwise null
	 * @return true if the occupancy has been successfully stored in the
	 *         database, false otherwise.
*/
private boolean insertAndCheckOccupancyRoom(FRPeriod period, FRRoom room,
OCCUPANCY_TYPE typeToInsert, String hash, String prevRoom) {
long tsStart = period.getTimeStampStart();
long tsEnd = period.getTimeStampEnd();
		boolean userOccupation = (typeToInsert == OCCUPANCY_TYPE.USER);
// first check if you can fully insert it (no other overlapping
// occupancy of rooms)
String checkRequest = "SELECT * FROM `fr-occupancy` oc "
+ "WHERE ((oc.timestampStart < ? AND oc.timestampStart > ?) "
+ "OR (oc.timestampEnd > ? AND oc.timestampEnd < ?) "
+ "OR (oc.timestampStart > ? AND oc.timestampEnd < ?)) AND oc.uid = ?";
Connection connectBDD;
try {
connectBDD = connMgr.getConnection();
PreparedStatement checkQuery = connectBDD
.prepareStatement(checkRequest);
checkQuery.setLong(1, tsEnd);
checkQuery.setLong(2, tsStart);
checkQuery.setLong(3, tsStart);
checkQuery.setLong(4, tsEnd);
checkQuery.setLong(5, tsStart);
checkQuery.setLong(6, tsEnd);
checkQuery.setString(7, room.getUid());
ResultSet checkResult = checkQuery.executeQuery();
while (checkResult.next()) {
OCCUPANCY_TYPE type = OCCUPANCY_TYPE.valueOf(checkResult
.getString("type"));
String uid = checkResult.getString("uid");
// if we have a match and this is a room occupancy, we cannot go
// further there is an overlap
if (typeToInsert == OCCUPANCY_TYPE.ROOM
&& type == OCCUPANCY_TYPE.ROOM) {
log(LOG_SIDE.SERVER, Level.WARNING,
"Error during insertion of occupancy, overlapping of two rooms occupancy, "
+ "want to insert : " + room.getUid()
+ " have conflict with " + uid);
return false;
}
}
// and now insert it !
if (!userOccupation) {
return insertOccupancyInDB(room.getUid(), tsStart, tsEnd,
OCCUPANCY_TYPE.ROOM, 0);
} else {
boolean overallInsertion = true;
long hourSharpBefore = Utils.roundHourBefore(tsStart);
long numberHours = Utils.determineNumberHour(tsStart, tsEnd);
for (int i = 0; i < numberHours; ++i) {
// also insert in the check table to prevent further submit
// during the same period from the same user
insertCheckOccupancyInDB(room.getUid(), hourSharpBefore + i
* Utils.ONE_HOUR_MS, hash, prevRoom);
overallInsertion = overallInsertion
&& insertOccupancyInDB(room.getUid(),
hourSharpBefore + i * Utils.ONE_HOUR_MS,
hourSharpBefore + (i + 1)
* Utils.ONE_HOUR_MS,
OCCUPANCY_TYPE.USER, 1);
}
return overallInsertion;
}
} catch (SQLException e) {
e.printStackTrace();
log(LOG_SIDE.SERVER,
Level.SEVERE,
"SQL error when checking and inserting occupancies in DB for room = "
+ room.getUid() + " start = "
+ period.getTimeStampStart() + " end = "
+ period.getTimeStampEnd() + " hash = " + hash
+ " type = " + typeToInsert.toString());
return false;
}
}
/**
	 * Insert an entry in the database which is used to deny multiple
	 * submissions of user occupancies. It should not be called without
	 * pre-checking. If you want to insert a new occupancy, call the public
	 * insertOccupancy(...) method.
*
* @param uid
* The uid of the room
* @param tsStart
* The start of the user occupancy
* @param hash
* The unique hash per user
* @param prevRoom
	 *            The uid of the previous room being stored in the
	 *            checkOccupancy table (if any), null otherwise
*/
private void insertCheckOccupancyInDB(String uid, long tsStart,
String hash, String prevRoom) {
String insertRequest = "INSERT INTO `fr-checkOccupancy` (uid, timestampStart, hash) "
+ "VALUES (?, ?, ?) ON DUPLICATE KEY UPDATE uid = ?";
Connection connectBDD;
try {
connectBDD = connMgr.getConnection();
PreparedStatement insertQuery = connectBDD
.prepareStatement(insertRequest);
insertQuery.setString(1, uid);
insertQuery.setLong(2, tsStart);
insertQuery.setString(3, hash);
insertQuery.setString(4, uid);
int update = insertQuery.executeUpdate();
if (update > 1) {
// we have updated the current row, thus we also need to adjust
// the count value in fr-occupancy table
decrementUserOccupancyCount(prevRoom, tsStart);
}
} catch (SQLException e) {
log(LOG_SIDE.SERVER, Level.SEVERE,
"SQL error when writing check Occupancy for uid = " + uid
+ " hash = " + hash + " start = " + tsStart);
e.printStackTrace();
}
}
/**
* Decrement the count of users in the room by one
*
* @param uid
* The room to update
* @param tsStart
* The period to update
*/
private void decrementUserOccupancyCount(String uid, long tsStart) {
if (uid == null) {
return;
}
String updateRequest = "UPDATE `fr-occupancy` co SET co.count = co.count - 1 "
+ "WHERE co.uid = ? AND co.timestampStart = ?";
Connection connectBDD;
try {
connectBDD = connMgr.getConnection();
PreparedStatement insertQuery = connectBDD
.prepareStatement(updateRequest);
insertQuery.setString(1, uid);
insertQuery.setLong(2, tsStart);
int update = insertQuery.executeUpdate();
System.out.println(update + " rooms updated (decrement)");
} catch (SQLException e) {
log(LOG_SIDE.SERVER, Level.SEVERE,
"SQL error when updating (decrement by one) user occupancy for uid = "
+ uid + " start = " + tsStart);
e.printStackTrace();
}
}
/**
* Insert a given occupancy in the database, if there is a duplicate key,
* the count field is incremented by one.
*
* @param uid
* The unique id of the room
* @param tsStart
* The start of the period
* @param tsEnd
* The end of the period
* @param type
* The type of occupancy (USER, ROOM)
* @param count
* The count associated to the occupancy
	 * @return true if the query was successful, false otherwise.
*/
private boolean insertOccupancyInDB(String uid, long tsStart, long tsEnd,
OCCUPANCY_TYPE type, int count) {
String insertRequest = "INSERT INTO `fr-occupancy` (uid, timestampStart, timestampEnd, type, count) "
+ "VALUES (?, ?, ?, ?, ?) "
+ "ON DUPLICATE KEY UPDATE count = count + 1";
Connection connectBDD;
try {
connectBDD = connMgr.getConnection();
PreparedStatement insertQuery = connectBDD
.prepareStatement(insertRequest);
insertQuery.setString(1, uid);
insertQuery.setLong(2, tsStart);
insertQuery.setLong(3, tsEnd);
insertQuery.setString(4, type.toString());
insertQuery.setInt(5, count);
insertQuery.execute();
return true;
} catch (SQLException e) {
log(LOG_SIDE.SERVER, Level.SEVERE,
"SQL error when inserting occupancy in DB, uid = " + uid
+ " type = " + type.toString() + " start = "
+ tsStart + " end = " + tsEnd);
e.printStackTrace();
return false;
}
}
// for test purposes ONLY
public FreeRoomServiceImpl(ConnectionManager conn) {
System.out.println("Starting TEST FreeRoom plugin server ...");
connMgr = conn;
}
/**
	 * Get the occupancy of a given period, for a specific list of rooms or for
	 * any rooms. See the thrift file for further information about what can be
	 * requested.
*/
@Override
public FRReply getOccupancy(FRRequest request) throws TException {
FRReply reply = new FRReply(HttpURLConnection.HTTP_OK,
HttpURLConnection.HTTP_OK + "");
// round the given period to half hours to have a nice display on UI.
FRPeriod period = request.getPeriod();
long tsStart = Utils.roundToNearestHalfHourBefore(period
.getTimeStampStart());
long tsEnd = Utils
.roundToNearestHalfHourAfter(period.getTimeStampEnd());
int group = request.getUserGroup();
if (!FRTimes.validCalendars(period)) {
// if something is wrong in the request
return new FRReply(HttpURLConnection.HTTP_BAD_REQUEST,
"Bad timestamps! Your client sent a bad request, sorry");
}
boolean onlyFreeRoom = request.isOnlyFreeRooms();
List<String> uidList = request.getUidList();
HashMap<String, List<Occupancy>> occupancies = null;
if (uidList == null || uidList.isEmpty()) {
if (onlyFreeRoom) {
// we want to look into all the rooms
occupancies = getOccupancyOfAnyFreeRoom(onlyFreeRoom, tsStart,
tsEnd, group);
} else {
return new FRReply(HttpURLConnection.HTTP_BAD_REQUEST,
"The search for any free room must contains onlyFreeRoom = true");
}
} else {
// or the user specified a specific list of rooms he wants to check
occupancies = getOccupancyOfSpecificRoom(uidList, onlyFreeRoom,
tsStart, tsEnd, group);
}
occupancies = sortRooms(occupancies);
reply.setOccupancyOfRooms(occupancies);
return reply;
}
/**
	 * The HashMap is organized by the following relation (building -> list of
	 * rooms) and each list of rooms is sorted independently. The rooms are
	 * sorted according to several criteria; see the comparator roomsFreeComparator.
*
* @param occ
* The HashMap to be sorted
* @return The HashMap sorted
*/
private HashMap<String, List<Occupancy>> sortRooms(
HashMap<String, List<Occupancy>> occ) {
if (occ == null) {
return null;
}
for (String key : occ.keySet()) {
List<Occupancy> value = occ.get(key);
Collections.sort(value, roomsFreeComparator);
}
return occ;
}
/**
	 * Comparator used to sort rooms according to several criteria. It first
	 * puts the entirely free rooms, then the partially occupied ones and then
	 * the unavailable rooms. Entirely free rooms are sorted by probable
	 * occupancy (users); partially occupied rooms are sorted first by
	 * percentage of room occupation (i.e. how many hours the room is occupied
	 * compared to the total period) and then by probable occupancy (users).
*/
private Comparator<Occupancy> roomsFreeComparator = new Comparator<Occupancy>() {
@Override
public int compare(Occupancy o0, Occupancy o1) {
boolean onlyFree1 = !o0.isIsAtLeastOccupiedOnce();
boolean onlyFree2 = !o1.isIsAtLeastOccupiedOnce();
boolean occupied1 = o0.isIsAtLeastOccupiedOnce();
boolean occupied2 = o1.isIsAtLeastOccupiedOnce();
boolean notFree1 = !onlyFree1 && occupied1;
boolean notFree2 = !onlyFree2 && occupied2;
if (onlyFree1 && onlyFree2) {
return compareOnlyFree(o0.getRatioWorstCaseProbableOccupancy(),
o1.getRatioWorstCaseProbableOccupancy());
} else if (onlyFree1 && !onlyFree2) {
return -1;
} else if (!onlyFree1 && onlyFree2) {
return 1;
} else if (occupied1 && occupied2) {
double rate1 = rateOccupied(o0.getOccupancy());
double rate2 = rateOccupied(o1.getOccupancy());
return comparePartiallyOccupied(rate1, rate2,
o0.getRatioWorstCaseProbableOccupancy(),
o1.getRatioWorstCaseProbableOccupancy());
} else if (occupied1 && notFree2) {
return -1;
} else if (notFree1 && occupied2) {
return 1;
} else {
return 0;
}
}
private int comparePartiallyOccupied(double rate1, double rate2,
double prob1, double prob2) {
if (rate1 == rate2) {
return equalPartiallyOccupied(prob1, prob2);
} else if (rate1 < rate2) {
return -1;
} else {
return 1;
}
}
private int equalPartiallyOccupied(double prob1, double prob2) {
if (prob1 < prob2) {
return -1;
} else if (prob1 > prob2) {
return 1;
}
return 0;
}
/**
* Count the number of hours in the ActualOccupation given
*
* @param acc
* The ActualOccupation to be counted.
* @return The number of hours in the ActualOccupation
*/
private int countNumberHour(ActualOccupation acc) {
long tsStart = acc.getPeriod().getTimeStampStart();
long tsEnd = acc.getPeriod().getTimeStampEnd();
Calendar mCalendar = Calendar.getInstance();
mCalendar.setTimeInMillis(tsStart);
int startHour = mCalendar.get(Calendar.HOUR_OF_DAY);
mCalendar.setTimeInMillis(tsEnd);
int endHour = mCalendar.get(Calendar.HOUR_OF_DAY);
return Math.abs(endHour - startHour);
}
private double rateOccupied(List<ActualOccupation> occupations) {
int count = 0;
int total = 0;
for (ActualOccupation acc : occupations) {
int nbHours = countNumberHour(acc);
if (!acc.isAvailable()) {
count += nbHours;
}
total += nbHours;
}
return total > 0 ? (double) count / total : 0.0;
}
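		/*
		 * Worked example (hypothetical figures): a room occupied for 2 of the
		 * 4 hours of the requested window gives rateOccupied = 0.5, so it
		 * sorts after a room occupied for only 1 of 4 hours (rate 0.25).
		 */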
private int compareOnlyFree(double prob1, double prob2) {
if (prob1 < prob2) {
return -1;
} else if (prob1 > prob2) {
return +1;
}
return 0;
}
};
/**
* Return the occupancy of all the free rooms during a given period.
*
* @param onlyFreeRooms
* Should always be true
* @param tsStart
* The start of the period, should be rounded, see public
* getOccupancy
* @param tsEnd
* The end of the period, should be rounded, see public
* getOccupancy
* @return A HashMap organized as follows (building -> list of free rooms in
* the building)
*/
private HashMap<String, List<Occupancy>> getOccupancyOfAnyFreeRoom(
boolean onlyFreeRooms, long tsStart, long tsEnd, int userGroup) {
log(LOG_SIDE.SERVER, Level.INFO,
"Requesting occupancy of any free rooms");
HashMap<String, List<Occupancy>> result = new HashMap<String, List<Occupancy>>();
if (onlyFreeRooms) {
Connection connectBDD;
try {
connectBDD = connMgr.getConnection();
// first select rooms totally free
String request = "SELECT rl.uid, rl.doorCode, rl.capacity "
+ "FROM `fr-roomslist` rl "
+ "WHERE rl.uid NOT IN("
+ "SELECT ro.uid FROM `fr-occupancy` ro "
+ "WHERE ((ro.timestampEnd <= ? AND ro.timestampEnd >= ?) "
+ "OR (ro.timestampStart <= ? AND ro.timestampStart >= ?)"
+ "OR (ro.timestampStart <= ? AND ro.timestampEnd >= ?)) "
+ "AND ro.type LIKE ?) AND rl.accessGroup <= ? AND rl.enabled = 1";
PreparedStatement query = connectBDD.prepareStatement(request);
query.setLong(1, tsEnd);
query.setLong(2, tsStart);
query.setLong(3, tsEnd);
query.setLong(4, tsStart);
query.setLong(5, tsStart);
query.setLong(6, tsEnd);
query.setString(7, OCCUPANCY_TYPE.ROOM.toString());
query.setInt(8, userGroup);
ResultSet resultQuery = query.executeQuery();
ArrayList<String> uidsList = new ArrayList<String>();
while (resultQuery.next()) {
String uid = resultQuery.getString("uid");
uidsList.add(uid);
}
if (uidsList.isEmpty()) {
log(LOG_SIDE.SERVER, Level.WARNING,
"No rooms are free during period start = "
+ tsStart + " end = " + tsEnd);
return new HashMap<String, List<Occupancy>>();
}
return getOccupancyOfSpecificRoom(uidsList, onlyFreeRooms,
tsStart, tsEnd, userGroup);
} catch (SQLException e) {
e.printStackTrace();
log(LOG_SIDE.SERVER, Level.SEVERE,
"SQL error for occupancy of any free room, start = "
+ tsStart + " end = " + tsEnd);
}
} else {
			log(LOG_SIDE.SERVER, Level.WARNING,
					"Getting request for any free rooms with the onlyFreeRooms attribute set to false");
}
return result;
}
/**
* Return the occupancies of the specified list of rooms.
*
* @param uidList
* The list of rooms to be checked
* @param onlyFreeRooms
	 *            Whether the results should contain only entirely free rooms
* @param tsStart
* The start of the period, should be rounded, see public
* getOccupancy
* @param tsEnd
* The end of the period, should be rounded, see public
* getOccupancy
* @return A HashMap organized as follows (building -> list of rooms in the
* building)
*/
private HashMap<String, List<Occupancy>> getOccupancyOfSpecificRoom(
List<String> uidList, boolean onlyFreeRooms, long tsStart,
long tsEnd, int userGroup) {
if (uidList.isEmpty()) {
return getOccupancyOfAnyFreeRoom(onlyFreeRooms, tsStart, tsEnd,
userGroup);
}
uidList = Utils.removeDuplicate(uidList);
log(LOG_SIDE.SERVER, Level.INFO,
"Requesting occupancy of specific list of rooms " + uidList);
HashMap<String, List<Occupancy>> result = new HashMap<String, List<Occupancy>>();
int numberOfRooms = uidList.size();
// formatting for the query
String roomsListQueryFormat = "";
for (int i = 0; i < numberOfRooms - 1; ++i) {
roomsListQueryFormat += "?,";
}
roomsListQueryFormat += "?";
Connection connectBDD;
try {
connectBDD = connMgr.getConnection();
String request = "SELECT rl.uid, rl.doorCode, rl.capacity, rl.alias, rl.typeEN, rl.typeFR, "
+ "uo.count, uo.timestampStart, uo.timestampEnd, uo.type "
+ "FROM `fr-roomslist` rl, `fr-occupancy` uo "
+ "WHERE rl.uid = uo.uid AND rl.uid IN("
+ roomsListQueryFormat
+ ") "
+ "AND ((uo.timestampEnd <= ? AND uo.timestampEnd >= ? ) "
+ "OR (uo.timestampStart <= ? AND uo.timestampStart >= ?)"
+ "OR (uo.timestampStart <= ? AND uo.timestampEnd >= ?)) "
+ "ORDER BY rl.uid ASC, uo.timestampStart ASC";
PreparedStatement query = connectBDD.prepareStatement(request);
int i = 1;
for (; i <= numberOfRooms; ++i) {
query.setString(i, uidList.get(i - 1));
}
query.setLong(i, tsEnd);
query.setLong(i + 1, tsStart);
query.setLong(i + 2, tsEnd);
query.setLong(i + 3, tsStart);
query.setLong(i + 4, tsStart);
query.setLong(i + 5, tsEnd);
ResultSet resultQuery = query.executeQuery();
String currentUID = null;
String currentDoorCode = null;
OccupancySorted currentOccupancy = null;
			// We can add the ActualOccupations as they come; the class
			// OccupancySorted sorts them and deals with the order
while (resultQuery.next()) {
// extract attributes of record
long start = resultQuery.getLong("timestampStart");
long end = resultQuery.getLong("timestampEnd");
String uid = resultQuery.getString("uid");
int count = resultQuery.getInt("count");
String doorCode = resultQuery.getString("doorCode");
String alias = resultQuery.getString("alias");
String typeFR = resultQuery.getString("typeFR");
String typeEN = resultQuery.getString("typeEN");
OCCUPANCY_TYPE type = OCCUPANCY_TYPE.valueOf(resultQuery
.getString("type"));
				boolean available = (type == OCCUPANCY_TYPE.USER);
int capacity = resultQuery.getInt("capacity");
double ratio = capacity > 0 ? (double) count / capacity : 0.0;
FRPeriod period = new FRPeriod(start, end, false);
FRRoom mRoom = new FRRoom(doorCode, uid);
mRoom.setCapacity(capacity);
if (alias != null) {
mRoom.setDoorCodeAlias(alias);
}
if (typeEN != null) {
mRoom.setTypeEN(typeEN);
}
if (typeFR != null) {
mRoom.setTypeFR(typeFR);
}
// if this is the first iteration
if (currentUID == null) {
currentUID = uid;
currentDoorCode = mRoom.getDoorCode();
currentOccupancy = new OccupancySorted(mRoom, tsStart,
tsEnd, onlyFreeRooms);
}
				// we move on to the next room, thus we re-initialize the
				// attributes for the loop and store the previous room in the
				// resulting HashMap
if (!uid.equals(currentUID)) {
Occupancy mOccupancy = currentOccupancy.getOccupancy();
addToHashMapOccupancy(currentDoorCode, mOccupancy, result);
					// remove the room from the list, this is important as not
					// all the rooms might be matched by the query (if there is
					// no entry, for instance)
uidList.remove(currentUID);
// re-initialize the value, and continue the process for
// other rooms
currentDoorCode = mRoom.getDoorCode();
currentOccupancy = new OccupancySorted(mRoom, tsStart,
tsEnd, onlyFreeRooms);
currentUID = uid;
}
ActualOccupation accOcc = new ActualOccupation(period,
available);
accOcc.setProbableOccupation(count);
accOcc.setRatioOccupation(ratio);
currentOccupancy.addActualOccupation(accOcc);
}
// the last room has not been added yet
if (currentOccupancy != null && currentOccupancy.size() != 0) {
Occupancy mOccupancy = currentOccupancy.getOccupancy();
addToHashMapOccupancy(currentDoorCode, mOccupancy, result);
// remove the room from the list
uidList.remove(currentUID);
}
			// for all the other rooms that haven't been matched by the query,
			// we need to add them too
if (!uidList.isEmpty()) {
roomsListQueryFormat = "";
for (i = 0; i < uidList.size() - 1; ++i) {
roomsListQueryFormat += "?,";
}
roomsListQueryFormat += "?";
String infoRequest = "SELECT rl.uid, rl.doorCode, rl.capacity, rl.alias, rl.typeEN, rl.typeFR "
+ "FROM `fr-roomslist` rl "
+ "WHERE rl.uid IN("
+ roomsListQueryFormat + ")";
PreparedStatement infoQuery = connectBDD
.prepareStatement(infoRequest);
for (i = 1; i <= uidList.size(); ++i) {
infoQuery.setString(i, uidList.get(i - 1));
}
ResultSet infoRoom = infoQuery.executeQuery();
while (infoRoom.next()) {
String uid = infoRoom.getString("uid");
String doorCode = infoRoom.getString("doorCode");
int capacity = infoRoom.getInt("capacity");
String alias = infoRoom.getString("alias");
String typeFR = infoRoom.getString("typeFR");
String typeEN = infoRoom.getString("typeEN");
FRRoom mRoom = new FRRoom(doorCode, uid);
mRoom.setCapacity(capacity);
if (alias != null) {
mRoom.setDoorCodeAlias(alias);
}
if (typeEN != null) {
mRoom.setTypeEN(typeEN);
}
if (typeFR != null) {
mRoom.setTypeFR(typeFR);
}
currentOccupancy = new OccupancySorted(mRoom, tsStart,
tsEnd, onlyFreeRooms);
FRPeriod period = new FRPeriod(tsStart, tsEnd, false);
ActualOccupation accOcc = new ActualOccupation(period, true);
accOcc.setProbableOccupation(0);
currentOccupancy.addActualOccupation(accOcc);
Occupancy mOccupancy = currentOccupancy.getOccupancy();
addToHashMapOccupancy(mRoom.getDoorCode(), mOccupancy,
result);
}
}
} catch (SQLException e) {
log(LOG_SIDE.SERVER, Level.SEVERE,
"SQL error of occupancy of specific list of rooms "
+ uidList + " start = " + tsStart + " end = "
+ tsEnd);
}
return result;
}
/**
* Add to a given HashMap the given occupancy by extracting the building
* from the doorCode. The HashMap maps a building to a list of Occupancy for
	 * the rooms in that building.
*
* @param doorCode
* The door code of the room to add
* @param mOcc
* The Occupancy of the room
* @param result
* The HashMap in which we add the room
*/
private void addToHashMapOccupancy(String doorCode, Occupancy mOcc,
HashMap<String, List<Occupancy>> result) {
if (mOcc == null || doorCode == null) {
return;
}
String building = Utils.extractBuilding(doorCode);
List<Occupancy> occ = result.get(building);
if (occ == null) {
occ = new ArrayList<Occupancy>();
result.put(building, occ);
}
occ.add(mOcc);
}
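	/*
	 * Example (hypothetical door code, assuming Utils.extractBuilding returns
	 * the building prefix): for doorCode "BC 07" the Occupancy is appended to
	 * the list stored under the key "BC".
	 */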
/**
	 * Returns all the rooms that satisfy the hint given in the request.
*
* The hint may be the start of the door code or the uid or even the alias.
*
* Constraints should be at least 2 characters long. You can specify a list
* of forbidden rooms the server should not include in the response. The
* number of results is bounded by the constant LIMIT_AUTOCOMPLETE.
*
*/
@Override
public AutoCompleteReply autoCompleteRoom(AutoCompleteRequest request)
throws TException {
log(LOG_SIDE.SERVER, Level.INFO,
"Autocomplete of " + request.getConstraint());
AutoCompleteReply reply = new AutoCompleteReply(
HttpURLConnection.HTTP_OK, "" + HttpURLConnection.HTTP_OK);
String constraint = request.getConstraint();
if (constraint.length() < 2) {
return new AutoCompleteReply(HttpURLConnection.HTTP_BAD_REQUEST,
"Constraints should be at least 2 characters long.");
}
List<FRRoom> rooms = new ArrayList<FRRoom>();
Set<String> forbiddenRooms = request.getForbiddenRoomsUID();
String forbidRoomsSQL = "";
if (forbiddenRooms != null) {
for (int i = forbiddenRooms.size(); i > 0; --i) {
if (i <= 1) {
forbidRoomsSQL += "?";
} else {
forbidRoomsSQL += "?,";
}
}
}
// avoid all whitespaces for requests
constraint = constraint.replaceAll("\\s+", "");
try {
Connection connectBDD = connMgr.getConnection();
String requestSQL = "";
if (forbiddenRooms == null) {
requestSQL = "SELECT * "
+ "FROM `fr-roomslist` rl "
+ "WHERE (rl.uid LIKE (?) OR rl.doorCodeWithoutSpace LIKE (?) OR rl.alias LIKE (?)) "
+ "AND rl.groupAccess <= ? AND rl.enabled = 1 "
+ "ORDER BY rl.doorCode ASC LIMIT "
+ LIMIT_AUTOCOMPLETE;
} else {
requestSQL = "SELECT * "
+ "FROM `fr-roomslist` rl "
+ "WHERE (rl.uid LIKE (?) OR rl.doorCodeWithoutSpace LIKE (?) OR rl.alias LIKE (?)) "
+ "AND rl.groupAccess <= ? AND rl.enabled = 1 AND rl.uid NOT IN ("
+ forbidRoomsSQL + ") "
+ "ORDER BY rl.doorCode ASC LIMIT "
+ LIMIT_AUTOCOMPLETE;
}
PreparedStatement query = connectBDD.prepareStatement(requestSQL);
query.setString(1, constraint + "%");
query.setString(2, constraint + "%");
query.setString(3, constraint + "%");
query.setInt(4, request.getUserGroup());
if (forbiddenRooms != null) {
int i = 5;
for (String roomUID : forbiddenRooms) {
query.setString(i, roomUID);
++i;
}
}
// filling the query with values
ResultSet resultQuery = query.executeQuery();
while (resultQuery.next()) {
FRRoom frRoom = new FRRoom(resultQuery.getString("doorCode"),
resultQuery.getString("uid"));
int cap = resultQuery.getInt("capacity");
if (cap > 0) {
frRoom.setCapacity(cap);
}
String alias = resultQuery.getString("alias");
if (alias != null) {
frRoom.setDoorCodeAlias(alias);
}
String typeFR = resultQuery.getString("typeFR");
if (typeFR != null) {
frRoom.setTypeFR(typeFR);
}
String typeEN = resultQuery.getString("typeEN");
if (typeEN != null) {
frRoom.setTypeEN(typeEN);
}
rooms.add(frRoom);
}
reply = new AutoCompleteReply(HttpURLConnection.HTTP_OK, ""
+ HttpURLConnection.HTTP_OK);
reply.setListRoom(Utils.sortRoomsByBuilding(rooms));
} catch (SQLException e) {
reply = new AutoCompleteReply(
HttpURLConnection.HTTP_INTERNAL_ERROR, ""
+ HttpURLConnection.HTTP_INTERNAL_ERROR);
e.printStackTrace();
log(LOG_SIDE.SERVER, Level.SEVERE,
"SQL error for autocomplete request with constraint "
+ constraint);
}
return reply;
}
/**
	 * The client can specify a user occupancy during a given period. Multiple
	 * submissions for the same period (and same user) are not allowed; we
	 * return HTTP_CONFLICT in that case.
*/
@Override
public ImWorkingReply indicateImWorking(ImWorkingRequest request)
throws TException {
WorkingOccupancy work = request.getWork();
FRPeriod period = work.getPeriod();
FRRoom room = work.getRoom();
boolean success = insertOccupancy(period, OCCUPANCY_TYPE.USER, room,
request.getHash());
log(LOG_SIDE.SERVER, Level.INFO, "ImWorkingThere request for room "
+ room.getDoorCode() + " : " + success);
if (success) {
return new ImWorkingReply(HttpURLConnection.HTTP_OK, "");
} else {
return new ImWorkingReply(HttpURLConnection.HTTP_CONFLICT,
"User already said he was working there");
}
}
@Override
public WhoIsWorkingReply whoIsWorking(WhoIsWorkingRequest request)
throws TException {
// TODO Auto-generated method stub
return null;
}
/**
* Pre-format the message for logging
*
* @param message
* The message
* @param path
* The path to the file where the bug happened
* @return A pre-formatted message containing the path and the message.
*/
private String formatPathMessageLogAndroid(String message, String path) {
return path + " / " + message;
}
/**
	 * Log Severe messages coming from external clients such as Android.
*/
@Override
public void logSevere(LogMessage arg0) throws TException {
log(LOG_SIDE.ANDROID, Level.SEVERE,
formatPathMessageLogAndroid(arg0.getMessage(), arg0.getPath()),
arg0.getTimestamp());
}
/**
	 * Log Warning messages coming from external clients such as Android.
*/
@Override
public void logWarning(LogMessage arg0) throws TException {
log(LOG_SIDE.ANDROID, Level.WARNING,
formatPathMessageLogAndroid(arg0.getMessage(), arg0.getPath()),
arg0.getTimestamp());
}
}
|
package edu.yu.einstein.wasp.plugin.illumina.batch.tasklet;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.StepContribution;
import org.springframework.batch.core.scope.context.ChunkContext;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import edu.yu.einstein.wasp.daemon.batch.tasklets.AbandonMessageHandlingTasklet;
import edu.yu.einstein.wasp.exception.GridException;
import edu.yu.einstein.wasp.exception.InvalidFileTypeException;
import edu.yu.einstein.wasp.exception.MetadataException;
import edu.yu.einstein.wasp.exception.SampleException;
import edu.yu.einstein.wasp.grid.GridHostResolver;
import edu.yu.einstein.wasp.grid.work.GridResult;
import edu.yu.einstein.wasp.grid.work.GridTransportConnection;
import edu.yu.einstein.wasp.grid.work.GridWorkService;
import edu.yu.einstein.wasp.grid.work.SoftwareManager;
import edu.yu.einstein.wasp.grid.work.WorkUnit;
import edu.yu.einstein.wasp.grid.work.WorkUnit.ProcessMode;
import edu.yu.einstein.wasp.model.FileGroup;
import edu.yu.einstein.wasp.model.FileHandle;
import edu.yu.einstein.wasp.model.FileType;
import edu.yu.einstein.wasp.model.Run;
import edu.yu.einstein.wasp.model.Sample;
import edu.yu.einstein.wasp.model.SampleSource;
import edu.yu.einstein.wasp.plugin.fileformat.service.FastqService;
import edu.yu.einstein.wasp.plugin.fileformat.service.impl.FastqServiceImpl;
import edu.yu.einstein.wasp.plugin.illumina.service.WaspIlluminaService;
import edu.yu.einstein.wasp.plugin.illumina.software.IlluminaHiseqSequenceRunProcessor;
import edu.yu.einstein.wasp.service.FileService;
import edu.yu.einstein.wasp.service.MetaMessageService;
import edu.yu.einstein.wasp.service.RunService;
import edu.yu.einstein.wasp.service.SampleService;
import edu.yu.einstein.wasp.software.SoftwarePackage;
import edu.yu.einstein.wasp.util.PropertyHelper;
/**
*
*
* @author calder
*
*/
public class RegisterFilesTasklet extends AbandonMessageHandlingTasklet {
@Autowired
private RunService runService;
@Autowired
private SampleService sampleService;
@Autowired
private FileService fileService;
@Autowired
private FastqService fastqService;
@Autowired
private MetaMessageService metaMessageService;
@Autowired
private GridHostResolver hostResolver;
@Autowired
private IlluminaHiseqSequenceRunProcessor casava;
private GridTransportConnection transportConnection;
@Autowired
private FileType fastqFileType;
@Autowired
private FileType waspIlluminaHiseqQcMetricsFileType;
@Autowired
private WaspIlluminaService waspIlluminaService;
private int runId;
private Run run;
private String workingDirectory;
private Logger logger = LoggerFactory.getLogger(this.getClass());
public RegisterFilesTasklet() {
// required by cglib
}
public RegisterFilesTasklet(Integer runId) {
this.runId = runId;
}
private GridWorkService workService;
@Override
@Transactional("entityManager")
public RepeatStatus execute(StepContribution contrib, ChunkContext context) throws Exception {
//TODO: this should execute WorkUnits in Grid Task, not in sendExecToRemote.
run = runService.getRunById(runId);
List<SoftwarePackage> sd = new ArrayList<SoftwarePackage>();
sd.add(casava);
WorkUnit w = new WorkUnit();
w.setProcessMode(ProcessMode.SINGLE);
// set casava as software dependency to ensure we get sent to the
// correct host
w.setSoftwareDependencies(sd);
// save the GridWorkService so we can send many jobs there
workService = hostResolver.getGridWorkService(w);
transportConnection = workService.getTransportConnection();
String stageDir = transportConnection.getConfiguredSetting("illumina.data.stage");
if (!PropertyHelper.isSet(stageDir))
throw new GridException("illumina.data.stage is not defined!");
workingDirectory = stageDir + "/" + run.getName() + "/";
w.setWorkingDirectory(workingDirectory);
w.setCommand("mkdir -p wasp && cd wasp && ln -fs ../reports . && mkdir -p sequence && cd sequence");
// rename files with spaces in names
w.addCommand("shopt -s nullglob");
|
package org.jtalks.poulpe.web.osod;
import org.hibernate.SessionFactory;
/**
 * Since ZK listeners declared in zk.xml cannot be managed by Spring IoC, we define a singleton that is still
 * instantiated by Spring but is accessed directly from {@link OpenSessionOnDesktopZkListener}.
 *
 * @author stanislav bashkirtsev
 */
public final class SingletonOpenSessionsHolder {
private static SingletonOpenSessionsHolder HOLDER;
private final OpenSessions openSessions;
private SingletonOpenSessionsHolder(SessionFactory sessionFactory) {
this.openSessions = new OpenSessions(sessionFactory);
}
/**
 * Instantiates the sessions holder. This method should be called only from the Spring context; all other code
 * should access the class via {@link #getOpenSessions()}.
 *
 * @param sessionFactory a session factory to be pushed to {@link OpenSessions}
 * @return the singleton {@link OpenSessions} instance that can be used in {@link OpenSessionOnDesktopZkListener}
 */
public static synchronized OpenSessions instantiate(SessionFactory sessionFactory) {
if (HOLDER == null) {
HOLDER = new SingletonOpenSessionsHolder(sessionFactory);
}
return HOLDER.openSessions;
}
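/**
 * Returns the previously created {@link OpenSessions} instance.
 *
 * @return the singleton {@link OpenSessions} instance
 * @throws IllegalStateException if {@link #instantiate(SessionFactory)} has not been called yet
 */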
public static OpenSessions getOpenSessions() {
if (HOLDER == null) {
throw new IllegalStateException("Class should be instantiated with 'instantiate()' method first.");
}
return HOLDER.openSessions;
}
}
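/*
 * Illustrative usage sketch (editor-added, not part of the original source): the holder is created
 * once while Spring wires its beans and is then read from non-Spring-managed ZK code. The class and
 * method names below are assumptions for illustration only.
 */
class SingletonOpenSessionsHolderUsageExample {
/** Called once from the Spring context, e.g. from a bean factory method. */
static OpenSessions wireFromSpring(SessionFactory sessionFactory) {
return SingletonOpenSessionsHolder.instantiate(sessionFactory);
}
/** Called from {@link OpenSessionOnDesktopZkListener} or any other non-Spring-managed code. */
static OpenSessions readFromZkListener() {
return SingletonOpenSessionsHolder.getOpenSessions();
}
}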
|
package com.google.enterprise.connector.dctm;
import com.google.enterprise.connector.dctm.dctmdfcwrap.IDctmClient;
import com.google.enterprise.connector.dctm.dctmdfcwrap.IDctmLocalClient;
import com.google.enterprise.connector.dctm.dctmdfcwrap.IDctmLoginInfo;
import com.google.enterprise.connector.dctm.dctmdfcwrap.IDctmSession;
import com.google.enterprise.connector.dctm.dctmdfcwrap.IDctmSessionManager;
import com.google.enterprise.connector.dctm.dfcwrap.IClient;
import com.google.enterprise.connector.dctm.dfcwrap.ILocalClient;
import com.google.enterprise.connector.dctm.dfcwrap.ISession;
import com.google.enterprise.connector.dctm.dfcwrap.ISessionManager;
import com.google.enterprise.connector.spi.AuthenticationManager;
import com.google.enterprise.connector.spi.AuthorizationManager;
import com.google.enterprise.connector.spi.QueryTraversalManager;
import com.google.enterprise.connector.spi.RepositoryException;
import com.google.enterprise.connector.spi.Session;
public class DctmSession implements Session{
IClient dctmClient;
ILocalClient dctmLocalClient;
ISessionManager dctmsessionmanager;
ISession dctmsession;
String docbase;
public DctmSession() {
// NOTE: the docbase name and login credentials are currently hard-coded here.
docbase = "gdoc";
dctmClient = new IDctmClient();
dctmLocalClient = dctmClient.getLocalClientEx();
dctmsessionmanager = dctmLocalClient.newSessionManager();
IDctmLoginInfo dctmLoginInfo = new IDctmLoginInfo();
dctmLoginInfo.setUser("emilie");
dctmLoginInfo.setPassword("emilie2");
dctmsessionmanager.setIdentity(docbase, dctmLoginInfo);
dctmsession = dctmsessionmanager.newSession(docbase);
}
public QueryTraversalManager getQueryTraversalManager(){
DctmQueryTraversalManager dctmQtm = new DctmQueryTraversalManager();
dctmQtm.setIDctmSession((IDctmSession) dctmsession);
return dctmQtm;
}
/**
* Gets an AuthenticationManager. It is permissible to return null.
* A null return means that this implementation does not support an
* Authentication Manager. This may be for one of these reasons:
* <ul>
* <li> Authentication is not needed for this data source
* <li> Authentication is handled through another GSA-supported mechanism,
* such as LDAP
* </ul>
* @return an AuthenticationManager - may be null
* @throws RepositoryException
*/
public AuthenticationManager getAuthenticationManager() {
AuthenticationManager dctmAm = new DctmAuthenticationManager();
return dctmAm;
}
/**
* Gets an AuthorizationManager. It is permissible to return null.
* A null return means that this implementation does not support an
* Authorization Manager. This may be for one of these reasons:
* <ul>
* <li> Authorization is not needed for this data source - all documents are
* public
* <li> Authorization is handled through another GSA-supported mechanism,
* such as NTLM or Basic Auth
* </ul>
* @return an AuthorizationManager - may be null
* @throws RepositoryException
*/
public AuthorizationManager getAuthorizationManager(){
AuthorizationManager dctmAzm = new DctmAuthorizationManager();
return dctmAzm;
}
public IClient getIClient() {
return dctmClient;
}
public void setIClient(IDctmClient dctmClient) {
this.dctmClient = dctmClient;
}
public ILocalClient getDctmLocalClient() {
return dctmLocalClient;
}
public void setILocalClient(IDctmLocalClient dctmLocalClient) {
this.dctmLocalClient = dctmLocalClient;
}
public ISessionManager getISessionmanager() {
return dctmsessionmanager;
}
public ISession getISession() {
return dctmsession;
}
public void setISessionmanager(IDctmSessionManager dctmsessionmanager) {
this.dctmsessionmanager = dctmsessionmanager;
}
}
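/*
 * Illustrative usage sketch (editor-added, not part of the original source): typical use of the
 * session above from connector code. Note that the docbase and credentials are currently
 * hard-coded in the constructor.
 */
class DctmSessionUsageExample {
static QueryTraversalManager openTraversal() {
DctmSession session = new DctmSession();
return session.getQueryTraversalManager();
}
}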
|
package org.opensingular.server.commons.service;
import java.util.List;
import javax.inject.Inject;
import javax.transaction.Transactional;
import org.opensingular.flow.core.FlowDefinition;
import org.opensingular.flow.core.FlowInstance;
import org.opensingular.form.SInstance;
import org.opensingular.form.persistence.entity.FormEntity;
import org.opensingular.server.commons.persistence.entity.form.PetitionEntity;
import org.opensingular.server.commons.service.dto.PetitionSendedFeedback;
@Transactional
public class DefaultPetitionSender implements PetitionSender {
@Inject
private PetitionService<PetitionEntity, PetitionInstance> petitionService;
@Inject
private FormPetitionService<PetitionEntity> formPetitionService;
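/**
 * Sends the petition: consolidates any draft forms, fires the before-start hook, starts a new
 * flow instance for the petition's flow definition, fires the after-start hook and records the
 * petition history with the consolidated drafts.
 */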
@Override
public PetitionSendedFeedback send(PetitionInstance petition, SInstance instance, String codResponsavel) {
final List<FormEntity> consolidatedDrafts = formPetitionService.consolidateDrafts(petition);
final FlowDefinition<?> flowDefinition = PetitionUtil.getProcessDefinition(petition.getEntity());
petitionService.onBeforeStartProcess(petition, instance, codResponsavel);
FlowInstance flowInstance = petitionService.startNewProcess(petition, flowDefinition);
petitionService.onAfterStartProcess(petition, instance, codResponsavel, flowInstance);
petitionService.savePetitionHistory(petition, consolidatedDrafts);
return new PetitionSendedFeedback(petition);
}
}
|
package org.sagebionetworks.file.worker;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.fail;
import static org.sagebionetworks.repo.manager.file.FileHandleArchivalManager.S3_TAG_ARCHIVED;
import static org.sagebionetworks.repo.manager.file.FileHandleArchivalManager.S3_TAG_SIZE_THRESHOLD;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.opentest4j.AssertionFailedError;
import org.sagebionetworks.AsynchronousJobWorkerHelper;
import org.sagebionetworks.StackConfiguration;
import org.sagebionetworks.aws.CannotDetermineBucketLocationException;
import org.sagebionetworks.aws.SynapseS3Client;
import org.sagebionetworks.ids.IdGenerator;
import org.sagebionetworks.ids.IdType;
import org.sagebionetworks.repo.manager.S3TestUtils;
import org.sagebionetworks.repo.manager.UserManager;
import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.dbo.FileMetadataUtils;
import org.sagebionetworks.repo.model.dbo.dao.TestUtils;
import org.sagebionetworks.repo.model.dbo.file.FileHandleDao;
import org.sagebionetworks.repo.model.dbo.persistence.DBOFileHandle;
import org.sagebionetworks.repo.model.file.FileHandleArchivalRequest;
import org.sagebionetworks.repo.model.file.FileHandleArchivalResponse;
import org.sagebionetworks.repo.model.file.FileHandleStatus;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.util.Pair;
import org.sagebionetworks.util.TimeUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import com.amazonaws.services.s3.model.Tag;
@ExtendWith(SpringExtension.class)
@ContextConfiguration(locations = { "classpath:test-context.xml" })
public class FileHandleArchivalWorkerIngegrationTest {
public static final Long MAX_WAIT_MS = 1000L * 30L;
@Autowired
private AsynchronousJobWorkerHelper asynchronousJobWorkerHelper;
@Autowired
private UserManager userManager;
@Autowired
private FileHandleDao fileHandleDao;
@Autowired
private IdGenerator idGenerator;
@Autowired
private StackConfiguration config;
@Autowired
private SynapseS3Client s3Client;
private UserInfo adminUser;
private String bucket;
@BeforeEach
public void setup() {
fileHandleDao.truncateTable();
adminUser = userManager.getUserInfo(BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId());
bucket = config.getS3Bucket();
}
@AfterEach
public void cleanup() {
S3TestUtils.doDeleteAfter(s3Client);
fileHandleDao.truncateTable();
}
@Test
public void testArchivalRequestWithAfterTimeWindow() throws Exception {
Instant modifiedOn = Instant.now();
String availableFileKey = uploadFile("key_0", true);
// Available file modified after the time window
Long availableFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.AVAILABLE, availableFileKey).getId();
// Unlinked file modified after the time window
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, uploadFile("key_1", true)).getId();
// Unlinked file modified after the time window, copy of the available
Long unlinkedFileCopy = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, availableFileKey).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(0L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(availableFile, FileHandleStatus.AVAILABLE, null, false);
verify(unlinkedFile, FileHandleStatus.UNLINKED, null, false);
verify(unlinkedFileCopy, FileHandleStatus.UNLINKED, null, false);
});
}
@Test
public void testArchivalRequestWithUnlinked() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
// Unlinked file modified before the time window
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, uploadFile("key_0", true)).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(unlinkedFile, FileHandleStatus.ARCHIVED, null, true);
});
}
@Test
public void testArchivalRequestWithUnlinkedInExternalBucket() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
// Unlinked file modified before the time window
Long unlinkedFile = createDBOFile("anotherBucket", modifiedOn, FileHandleStatus.UNLINKED, uploadFile("key_0", false)).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(0L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(unlinkedFile, FileHandleStatus.UNLINKED, null, false);
});
}
@Test
public void testArchivalRequestWithAvailable() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
// Available file modified before the time window
Long availableFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.AVAILABLE, uploadFile("key_0", true)).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(0L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(availableFile, FileHandleStatus.AVAILABLE, null, false);
});
}
@Test
public void testArchivalRequestWithUnlinkedWithCopy() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
String fileKey = uploadFile("key_0", true);
// Unlinked file modified before the time window
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, fileKey).getId();
// Unlinked file modified before the time window, copy of the previous one
Long unlinkedFileCopy = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, fileKey).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(unlinkedFile, FileHandleStatus.ARCHIVED, null, true);
verify(unlinkedFileCopy, FileHandleStatus.ARCHIVED, null, true);
});
}
@Test
public void testArchivalRequestWithUnlinkedWithAvailableCopy() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
String fileKey = uploadFile("key_0", true);
// Unlinked file modified before the time window
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, fileKey).getId();
// Available file modified before the time window, copy of the previous one
Long availableFileCopy = createDBOFile(bucket, modifiedOn, FileHandleStatus.AVAILABLE, fileKey).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(unlinkedFile, FileHandleStatus.ARCHIVED, null, false); // The s3 object is not tagged as a copy is still available
verify(availableFileCopy, FileHandleStatus.AVAILABLE, null, false);
});
}
@Test
public void testArchivalRequestWithUnlinkedWithAvailableCopyAfterTimeWindow() throws Exception {
Instant now = Instant.now();
Instant modifiedOn = now.minus(31, ChronoUnit.DAYS);
String fileKey = uploadFile("key_0", true);
// Unlinked file modified before the time window
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, fileKey).getId();
// Available file modified after the time window, copy of the previous one
Long availableFileCopy = createDBOFile(bucket, now, FileHandleStatus.AVAILABLE, fileKey).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(unlinkedFile, FileHandleStatus.ARCHIVED, null, false); // The s3 object is not tagged as a copy is still available
verify(availableFileCopy, FileHandleStatus.AVAILABLE, null, false);
});
}
@Test
public void testArchivalRequestWithUnlinkedWithUnlinkedCopyAfterTimeWindow() throws Exception {
Instant now = Instant.now();
Instant modifiedOn = now.minus(31, ChronoUnit.DAYS);
String fileKey = uploadFile("key_0", true);
// Unlinked file modified before the time window
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, fileKey).getId();
// Unlinked file modified after the time window, copy of the previous one
Long unlinkedFileCopy = createDBOFile(bucket, now, FileHandleStatus.UNLINKED, fileKey).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(unlinkedFile, FileHandleStatus.ARCHIVED, null, false); // The s3 object is not tagged as a copy is unlinked but now within the time window
verify(unlinkedFileCopy, FileHandleStatus.UNLINKED, null, false);
});
}
@Test
public void testArchivalRequestWithUnlinkedUnderSizeThreshold() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
// Unlinked file modified before the time window under the size threshold for tagging
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, uploadFile("key_0", true), S3_TAG_SIZE_THRESHOLD - 1, false, null).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
verify(unlinkedFile, FileHandleStatus.ARCHIVED, null, false);
});
}
@Test
public void testArchivalRequestWithUnlinkedNotExisting() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
// Unlinked file modified before the time window whose S3 object does not exist
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, uploadFile("key_0", false)).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
assertFalse(fileHandleDao.doesExist(unlinkedFile.toString()));
});
}
@Test
public void testArchivalRequestWithUnlinkedAndPreview() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
Long preview = createDBOFile(bucket, modifiedOn, FileHandleStatus.AVAILABLE, uploadFile("key_0_preview", true), 123L, true, null).getId();
// Unlinked file modified before the time window with a preview
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, uploadFile("key_0", true), S3_TAG_SIZE_THRESHOLD, false, preview).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
// The preview was deleted as unused
assertFalse(fileHandleDao.doesExist(preview.toString()));
verify(unlinkedFile, FileHandleStatus.ARCHIVED, null, true);
});
}
@Test
public void testArchivalRequestWithUnlinkedAndLinkedPreview() throws Exception {
Instant modifiedOn = Instant.now().minus(31, ChronoUnit.DAYS);
Long preview = createDBOFile(bucket, modifiedOn, FileHandleStatus.AVAILABLE, uploadFile("preview", true), 123L, true, null).getId();
// Unlinked file modified before the time window with a preview
Long unlinkedFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.UNLINKED, uploadFile("key_0", true), S3_TAG_SIZE_THRESHOLD, false, preview).getId();
// Available file modified before the time window with same preview
Long availableFile = createDBOFile(bucket, modifiedOn, FileHandleStatus.AVAILABLE, uploadFile("key_1", true), S3_TAG_SIZE_THRESHOLD, false, preview).getId();
FileHandleArchivalRequest request = new FileHandleArchivalRequest();
asynchronousJobWorkerHelper.assertJobResponse(adminUser, request, (FileHandleArchivalResponse response) -> {
assertEquals(1L, response.getCount());
}, MAX_WAIT_MS);
verifyAsynch(() -> {
// The preview is still available as it is used by another file handle
verify(preview, FileHandleStatus.AVAILABLE, null, false);
verify(unlinkedFile, FileHandleStatus.ARCHIVED, null, true);
verify(availableFile, FileHandleStatus.AVAILABLE, preview, false);
});
}
private void verifyAsynch(Runnable runnable) throws Exception {
TimeUtils.waitFor(MAX_WAIT_MS, 500, () -> {
try {
runnable.run();
} catch (AssertionFailedError assertion) {
assertion.printStackTrace();
return new Pair<Boolean, Void>(false, null);
}
return new Pair<Boolean, Void>(true, null);
});
}
private void verify(Long id, FileHandleStatus status, Long previewId, boolean tagged) {
S3FileHandle handle = (S3FileHandle) fileHandleDao.get(id.toString());
assertEquals(status, handle.getStatus());
assertEquals(previewId == null ? null : previewId.toString(), handle.getPreviewId());
List<Tag> tags;
try {
tags = s3Client.getObjectTags(handle.getBucketName(), handle.getKey());
} catch (CannotDetermineBucketLocationException ex) {
if (!tagged) { // If the bucket does not exist and the object wasn't supposed to be tagged then it's fine
return;
}
throw ex;
}
if (tagged && tags.stream().noneMatch(t -> t.equals(S3_TAG_ARCHIVED))) {
fail("The file handle with key " + handle.getKey() + " was not tagged");
} else if (!tagged && !tags.isEmpty()) {
fail("The file handle with key " + handle.getKey() + " was not supposed to be tagged");
}
}
private String uploadFile(String prefix, boolean doUpload) throws Exception {
String key = "tests/" + FileHandleArchivalWorkerIngegrationTest.class.getSimpleName() + "/" + prefix + "/" + UUID.randomUUID().toString();
if (doUpload) {
S3TestUtils.createObjectFromString(bucket, key, "Some data", s3Client);
}
return key;
}
private DBOFileHandle createDBOFile(String bucket, Instant updatedOn, FileHandleStatus status, String key) {
return createDBOFile(bucket, updatedOn, status, key, S3_TAG_SIZE_THRESHOLD, false, null);
}
private DBOFileHandle createDBOFile(String bucket, Instant updatedOn, FileHandleStatus status, String key, Long contentSize, boolean isPreview, Long previewId) {
DBOFileHandle file = FileMetadataUtils.createDBOFromDTO(TestUtils.createS3FileHandle(adminUser.getId().toString(), idGenerator.generateNewId(IdType.FILE_IDS).toString()));
file.setBucketName(bucket);
file.setUpdatedOn(Timestamp.from(updatedOn));
file.setKey(key);
file.setStatus(status.name());
file.setIsPreview(isPreview);
file.setPreviewId(previewId);
file.setContentSize(contentSize);
fileHandleDao.createBatchDbo(Arrays.asList(file));
return file;
}
}
|
package gov.nih.nci.ncicb.cadsr.common.persistence.dao.jdbc;
import gov.nih.nci.ncicb.cadsr.common.dto.InstructionTransferObject;
import gov.nih.nci.ncicb.cadsr.common.exception.DMLException;
import gov.nih.nci.ncicb.cadsr.common.persistence.dao.ModuleInstructionDAO;
import gov.nih.nci.ncicb.cadsr.common.resource.Instruction;
import java.util.List;
import javax.sql.DataSource;
public class JDBCModuleInstructionDAOV2 extends JDBCInstructionDAOV2
implements ModuleInstructionDAO {
public JDBCModuleInstructionDAOV2(DataSource dataSource) {
super(dataSource);
}
/**
* Creates a new Module instruction component (just the header info).
*
* @param moduleInstr Module Instruction object
* @param parentId id of the parent module the instruction belongs to
*
* @return <b>int</b> 1 - success, 0 - failure.
*
* @throws <b>DMLException</b>
*/
public int createInstruction(Instruction moduleInstr, String parentId)
throws DMLException {
return super.createInstruction(moduleInstr,parentId,"MODULE_INSTR","MODULE_INSTRUCTION");
}
public List getInstructions(String moduleID)
throws DMLException {
return super.getInstructions(moduleID,"MODULE_INSTR");
}
/**
* Test application
*/
public static void main(String[] args) {
//ServiceLocator locator = new SimpleServiceLocator();
JDBCModuleInstructionDAOV2 test = new JDBCModuleInstructionDAOV2(null);
// test for createModuleInstructionComponent
try {
Instruction moduleInst = new InstructionTransferObject();
moduleInst.setVersion(new Float(2.31));
moduleInst.setLongName("Test Mod Instr Long Name 030204 1");
moduleInst.setPreferredDefinition("Test Mod instr pref def");
moduleInst.setConteIdseq("99BA9DC8-2095-4E69-E034-080020C9C0E0");
moduleInst.setAslName("DRAFT NEW");
moduleInst.setCreatedBy("Hyun Kim");
moduleInst.setDisplayOrder(7);
int res = test.createInstruction(moduleInst,"D45A49A8-167D-0422-E034-0003BA0B1A09");
System.out.println("\n*****Create Module Instruction Result 1: " + res);
}
catch (DMLException de) {
de.printStackTrace();
}
}
}
|
package smalipatchlib;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.regex.Pattern;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import dexequencelib.Console;
/**
* Represents .smali file and handles the details of modification operations and saving.
*
* @author Caleb Fenton
*/
public class SmaliFile {
class CodeModification implements Comparable {
public Integer Offset;
public String ReplaceWhat;
public String Value;
CodeModification(Integer o, String r, String v) {
Offset = o;
ReplaceWhat = r;
Value = v;
}
@Override
public boolean equals(Object o) {
if (o == null || o.getClass() != this.getClass()) {
return false;
}
return (o.hashCode() == hashCode());
}
@Override
public int hashCode() {
int hash = 7;
hash = (67 * hash) + (Offset != null ? Offset.hashCode() : 0);
hash = (67 * hash) + (ReplaceWhat != null ? ReplaceWhat.hashCode() : 0);
hash = (67 * hash) + (Value != null ? Value.hashCode() : 0);
return hash;
}
@Override
// Higher offsets must come first: replaces / inserts are applied in a loop, and editing at
// higher offsets first keeps the offsets of the remaining (lower) modifications valid.
public int compareTo(Object o) {
CodeModification cm = (CodeModification) o;
// Compare the boxed Integers by value; == on Integer objects would only compare references.
return cm.Offset.compareTo(Offset);
}
}
/**
* Full path file name
*/
public String FullFilePath;
/**
* Path only after smali dump directory
*/
public String FileName;
/**
* File contents
*/
public String FileLines;
/**
* Clones of fingerprints matched against this file. Keys are fingerprint names.
*/
public HashMap<String, Fingerprint> Fingerprints;
/**
* Offsets for regions. Keys are region names.
*/
public HashMap<String, Integer[]> RegionOffsetList;
/**
* List of code modifications to perform
*/
public SortedSet<CodeModification> CodeModifications;
/**
* Notify the user if this file is matched?
*/
public boolean Notify;
public SmaliFile() {
Fingerprints = new HashMap<String, Fingerprint>();
RegionOffsetList = new HashMap<String, Integer[]>();
CodeModifications = new TreeSet<CodeModification>();
Notify = true;
}
public SmaliFile(File smaliFile) {
this();
if (!smaliFile.exists()) {
Console.die("Smali file does not exist: " + smaliFile + ".", -1);
}
FullFilePath = smaliFile.getPath();
FileName = smaliFile.getName();
try {
// Use UTF-8 encoding when reading/writing because some obfuscation
// or languages use characters that won't compile otherwise
List<String> lines = FileUtils.readLines(smaliFile, "UTF-8");
StringBuilder buff = new StringBuilder();
for (String line : lines) {
buff.append(line).append(System.getProperty("line.separator"));
}
FileLines = buff.toString();
lines.clear();
} catch (IOException ex) {
Console.error("Exception reading " + smaliFile + ".\n" + ex);
}
}
public SmaliFile(String fileName) {
this(new File(fileName));
}
public SmaliFile(String fileName, InputStream is) {
this();
File f = new File(fileName);
FullFilePath = fileName;
FileName = f.getName();
StringBuilder lines = new StringBuilder();
List<String> read = null;
try {
read = IOUtils.readLines(is);
} catch (IOException ex) {
Console.error("Exception reading input stream.\n" + ex);
}
if (read != null) {
// Keep line separators so offsets and regexes behave the same as for files read from disk.
for (String s : read) {
lines.append(s).append(System.getProperty("line.separator"));
}
}
FileLines = lines.toString();
}
@Override
public String toString() {
// strip beginning of path leading up to and including "smali/"
int pos = FullFilePath.indexOf("smali" + File.separator);
if (pos < 0) {
pos = 0;
} else {
// skip past "smali" plus the separator itself
pos += ("smali" + File.separator).length();
}
return FullFilePath.substring(pos);
}
/**
* When regions are matched, they are added here.
*
* @param regionName
* name of the region
* @param regionStarts
* offset in file for start of region
* @param regionEnds
* offset in file for end of region
*/
public void addRegion(String regionName, int regionStarts, int regionEnds) {
RegionOffsetList.put(regionName, new Integer[] { regionStarts, regionEnds });
}
/**
* Add matched insert operation to list of modifications to perform.
*
* @param offset
* offset to insert
* @param codeInsert
* what to actually insert
*/
public void addInsert(Integer offset, String codeInsert) {
// doModifications() knows to simply insert when the second param is the empty string ""
addModification(new CodeModification(offset, "", codeInsert));
}
/**
* Add matched replace operation to list of modifications to perform.
*
* @param offset
* offset to start replace
* @param replaceWhat
* regex of what to replace
* @param replaceWith
* what to actually replace it with
*/
public void addReplace(Integer offset, String replaceWhat, String replaceWith) {
addModification(new CodeModification(offset, replaceWhat, replaceWith));
}
/**
* Performs all inserts/replacements and then saves the lines.
*
* @return true on successful modification and save, false otherwise
*/
public boolean doModificationsAndSave() {
// Must do all inserts at once, because offsets will change
if (!doModifications()) {
return false;
}
try {
Console.debug("Writing " + FileLines.length() + " chars to " + FullFilePath, 2);
File newF = new File(FullFilePath);
// NetBeans won't resolve this overload, so writeStringToFile is used instead:
// FileUtils.write(newF, FileLines, "UTF-8");
FileUtils.writeStringToFile(newF, FileLines, "UTF-8");
if (!newF.exists()) {
Console.die("Was able to save " + FileName + " but it does not exist and no exception was thrown.");
}
} catch (IOException ex) {
Console.die("Unable to save " + FileName + ".\n" + ex);
return false;
}
return true;
}
private void addModification(CodeModification aCM) {
// if ( CodeModifications.contains(aCM) ) {
Console.debug("Setting modify in " + FileName + ": " + aCM.Value, 2);
CodeModifications.add(aCM);
}
/**
* Applies all queued inserts and replaces to the in-memory file contents.
*
* @return true if every modification appeared to apply cleanly, false otherwise
*/
protected boolean doModifications() {
// StringBuilder origLines = new StringBuilder(FileLines);
StringBuilder sb = new StringBuilder(FileLines);
boolean success = true;
for (CodeModification cm : CodeModifications) {
if (cm.ReplaceWhat.isEmpty()) {
Console.debug("Inserting @" + cm.Offset + ": " + cm.Value, 2);
sb.insert(cm.Offset, cm.Value);
// FileLines = FileLines.substring(0, cm.Offset)
// + cm.Value + FileLines.substring(cm.Offset, FileLines.length());
if ((sb.length() == FileLines.length())) {
Console.warn(FileName + ": Unable to insert @" + cm.Offset + ": " + cm.Value);
success = false;
}
} else {
Console.debug("Replacing @" + cm.Offset + ":\n" + cm.ReplaceWhat + " with " + cm.Value, 2);
int hashCode = sb.toString().hashCode();
String safeReplaceWhat = Pattern.quote(cm.ReplaceWhat);
String toReplace = sb.substring(cm.Offset);
String replaced = toReplace.replaceFirst(safeReplaceWhat, cm.Value);
sb.replace(cm.Offset, sb.length(), replaced);
// FileLines = FileLines.substring(0, cm.Offset)
// + FileLines.substring(cm.Offset).replaceFirst(
// safeReplaceWhat, cm.Value);
if (sb.toString().hashCode() == hashCode) {
Console.warn(FileName + ": Replace possibly did nothing @" + cm.Offset + ":\n" + cm.ReplaceWhat
+ " with " + cm.Value);
// success = false;
}
}
}
FileLines = sb.toString();
CodeModifications.clear();
return success;
}
}
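/*
 * Illustrative usage sketch (editor-added, not part of the original source): how a patch driver
 * would typically use SmaliFile. The path, offsets and smali snippets below are assumptions for
 * illustration only.
 */
class SmaliFileUsageExample {
static boolean patch(String smaliPath) {
SmaliFile file = new SmaliFile(smaliPath);
// Queue an insert and a replace; doModifications() applies them highest-offset first so
// earlier offsets remain valid.
file.addInsert(120, "    const/4 v0, 0x1\n");
file.addReplace(480, "const/4 v0, 0x0", "const/4 v0, 0x1");
// Applies all queued modifications and writes the result back to disk.
return file.doModificationsAndSave();
}
}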
|
package org.md2k.autosense.antradio.connection;
import android.os.RemoteException;
import android.util.Log;
import com.dsi.ant.channel.AntChannel;
import com.dsi.ant.channel.AntCommandFailedException;
import com.dsi.ant.channel.IAntChannelEventHandler;
import com.dsi.ant.message.ChannelId;
import com.dsi.ant.message.ChannelType;
import com.dsi.ant.message.fromant.AcknowledgedDataMessage;
import com.dsi.ant.message.fromant.BroadcastDataMessage;
import com.dsi.ant.message.fromant.ChannelEventMessage;
import com.dsi.ant.message.fromant.DataMessage;
import com.dsi.ant.message.fromant.MessageFromAntType;
import com.dsi.ant.message.ipc.AntMessageParcel;
import java.util.Arrays;
import org.md2k.autosense.Constants;
import org.md2k.autosense.LoggerText;
import org.md2k.autosense.antradio.ChannelInfo;
import org.md2k.autosense.devices.AutoSensePlatform;
import org.md2k.datakitapi.time.DateTime;
public class ChannelController {
private static final String TAG = ChannelController.class.getSimpleName();
private AntChannel mAntChannel;
private ChannelBroadcastListener mChannelBroadcastListener;
private ChannelEventCallback mChannelEventCallback = new ChannelEventCallback();
private ChannelInfo mChannelInfo;
private boolean mIsOpen;
public ChannelController(AntChannel antChannel, AutoSensePlatform autoSensePlatform,
ChannelBroadcastListener broadcastListener) {
mAntChannel = antChannel;
mChannelInfo = new ChannelInfo(autoSensePlatform);
mChannelBroadcastListener = broadcastListener;
openChannel();
}
public boolean openChannel() {
if (null != mAntChannel) {
if (mIsOpen) {
Log.w(TAG, "Channel was already open");
} else {
ChannelType channelType = ChannelType.BIDIRECTIONAL_SLAVE;
// Channel ID message contains device number, type and transmission type. In
// order for master (TX) channels and slave (RX) channels to connect, they
// must have the same channel ID, or wildcard (0) is used.
ChannelId channelId = new ChannelId(mChannelInfo.DEVICE_NUMBER,
mChannelInfo.CHANNEL_PROOF_DEVICE_TYPE, mChannelInfo.CHANNEL_PROOF_TRANSMISSION_TYPE);
try {
// Setting the channel event handler so that we can receive messages from ANT
mAntChannel.setChannelEventHandler(mChannelEventCallback);
// Performs channel assignment by assigning the type to the channel. Additional
// features (such as, background scanning and frequency agility) can be enabled
// by passing an ExtendedAssignment object to assign(ChannelType, ExtendedAssignment).
mAntChannel.assign(channelType);
mAntChannel.setChannelId(channelId);
mAntChannel.setPeriod(mChannelInfo.CHANNEL_PROOF_PERIOD);
mAntChannel.setRfFrequency(mChannelInfo.CHANNEL_PROOF_FREQUENCY);
mAntChannel.open();
mIsOpen = true;
Log.d(TAG, "Opened channel with device number: " + mChannelInfo.DEVICE_NUMBER);
} catch (RemoteException e) {
close();
channelError(e);
} catch (AntCommandFailedException e) {
// This will release, and therefore unassign if required
close();
channelError("Open failed", e);
}
}
} else {
Log.w(TAG, "No channel available");
}
return mIsOpen;
}
public ChannelInfo getCurrentInfo() {
return mChannelInfo;
}
void displayChannelError(String displayText) {
mChannelInfo.die(displayText);
}
void channelError(RemoteException e) {
String logString = "Remote service communication failed.";
Log.e(TAG, logString, e);
displayChannelError(logString);
}
void channelError(String error, AntCommandFailedException e) {
Log.e(TAG, error, e);
displayChannelError(error);
close();
}
public void close() {
// TODO kill all our resources
if (null != mAntChannel) {
mIsOpen = false;
// Releasing the channel to make it available for others.
// After releasing, the AntChannel instance cannot be reused.
try {
mAntChannel.clearChannelEventHandler();
} catch (RemoteException e) {
Log.d(TAG, "Failed to clear channel event handler", e);
}
mAntChannel.release();
try {
Thread.sleep(100);
} catch (InterruptedException e) {
e.printStackTrace();
}
mAntChannel = null;
}
displayChannelError("Channel Closed");
}
static public abstract class ChannelBroadcastListener {
public abstract void onBroadcastChanged(ChannelInfo newInfo);
}
/**
* Implements the Channel Event Handler Interface so that messages can be
* received and channel death events can be handled.
*/
public class ChannelEventCallback implements IAntChannelEventHandler {
private void updateData(DataMessage data) {
if(!mIsOpen) return;
mChannelInfo.status = 0;
mChannelInfo.broadcastData = data.getMessageContent();
mChannelInfo.timestamp= DateTime.getDateTime();
mChannelBroadcastListener.onBroadcastChanged(mChannelInfo);
}
private void sendError(String msg) {
mChannelInfo.broadcastData = msg.getBytes();
mChannelInfo.status = 1;
mChannelBroadcastListener.onBroadcastChanged(mChannelInfo);
}
@Override
public void onChannelDeath() {
// Display channel death message when channel dies
displayChannelError("Channel Death");
}
@Override
public void onReceiveMessage(MessageFromAntType messageType, AntMessageParcel antParcel) {
// Switching on message type to handle different types of messages
if(Constants.LOG_TEXT){
String str=String.valueOf(DateTime.getDateTime())+","+antParcel+"\n";
LoggerText.getInstance().saveDataToTextFile(str);
}
switch (messageType) {
// If data message, construct from parcel and update channel data
case BROADCAST_DATA:
// Rx Data
updateData(new BroadcastDataMessage(antParcel));
break;
case ACKNOWLEDGED_DATA:
// Rx Data
updateData(new AcknowledgedDataMessage(antParcel));
break;
case CHANNEL_EVENT:
// Constructing channel event message from parcel
ChannelEventMessage eventMessage = new ChannelEventMessage(antParcel);
// Switching on event code to handle the different types of channel events
switch (eventMessage.getEventCode()) {
case TX:
// Use old info as this is what remote device has just received
Log.d(TAG, "TX = " + mChannelInfo.broadcastData);
mChannelBroadcastListener.onBroadcastChanged(mChannelInfo);
mChannelInfo.broadcastData[0]++;
if (mIsOpen) {
try {
// Setting the data to be broadcast on the next channel period
mAntChannel.setBroadcastData(mChannelInfo.broadcastData);
} catch (RemoteException e) {
channelError(e);
}
}
break;
case RX_SEARCH_TIMEOUT:
// TODO May want to keep searching
displayChannelError("No Device Found");
sendError("No Device Found");
break;
case CHANNEL_CLOSED:
case CHANNEL_COLLISION:
case RX_FAIL:
case RX_FAIL_GO_TO_SEARCH:
case TRANSFER_RX_FAILED:
case TRANSFER_TX_COMPLETED:
case TRANSFER_TX_FAILED:
case TRANSFER_TX_START:
case UNKNOWN:
// TODO More complex communication will need to handle these events
break;
}
break;
case ANT_VERSION:
case BURST_TRANSFER_DATA:
case CAPABILITIES:
case CHANNEL_ID:
case CHANNEL_RESPONSE:
case CHANNEL_STATUS:
case SERIAL_NUMBER:
case OTHER:
// TODO More complex communication will need to handle these message types
break;
}
}
}
}
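/*
 * Illustrative usage sketch (editor-added, not part of the original source): wiring a
 * ChannelController once an AntChannel has been acquired from the ANT radio service and an
 * AutoSensePlatform has been configured elsewhere. The parameter names are assumptions.
 */
class ChannelControllerUsageExample {
static ChannelController connect(AntChannel acquiredChannel, AutoSensePlatform platform) {
// The constructor stores the channel, builds the ChannelInfo and opens the channel immediately.
return new ChannelController(acquiredChannel, platform,
new ChannelController.ChannelBroadcastListener() {
@Override
public void onBroadcastChanged(ChannelInfo newInfo) {
// React to newly received broadcast data (newInfo.broadcastData) here.
}
});
}
}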
|
package com.smartdevicelink.managers.screen;
import android.support.annotation.NonNull;
import android.util.Log;
import com.smartdevicelink.managers.BaseSubManager;
import com.smartdevicelink.managers.CompletionListener;
import com.smartdevicelink.managers.file.FileManager;
import com.smartdevicelink.managers.file.MultipleFileCompletionListener;
import com.smartdevicelink.managers.file.filetypes.SdlArtwork;
import com.smartdevicelink.protocol.enums.FunctionID;
import com.smartdevicelink.proxy.RPCNotification;
import com.smartdevicelink.proxy.RPCResponse;
import com.smartdevicelink.proxy.interfaces.ISdl;
import com.smartdevicelink.proxy.interfaces.OnSystemCapabilityListener;
import com.smartdevicelink.proxy.rpc.DisplayCapabilities;
import com.smartdevicelink.proxy.rpc.MetadataTags;
import com.smartdevicelink.proxy.rpc.OnHMIStatus;
import com.smartdevicelink.proxy.rpc.Show;
import com.smartdevicelink.proxy.rpc.TextField;
import com.smartdevicelink.proxy.rpc.enums.HMILevel;
import com.smartdevicelink.proxy.rpc.enums.MetadataType;
import com.smartdevicelink.proxy.rpc.enums.Result;
import com.smartdevicelink.proxy.rpc.enums.SystemCapabilityType;
import com.smartdevicelink.proxy.rpc.enums.TextAlignment;
import com.smartdevicelink.proxy.rpc.enums.TextFieldName;
import com.smartdevicelink.proxy.rpc.listeners.OnRPCNotificationListener;
import com.smartdevicelink.proxy.rpc.listeners.OnRPCResponseListener;
import com.smartdevicelink.util.DebugTool;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static com.smartdevicelink.proxy.rpc.enums.TextAlignment.CENTERED;
/**
* <strong>TextAndGraphicManager</strong> <br>
*
* Note: This class must be accessed through the SdlManager. Do not instantiate it by itself. <br>
*
*/
abstract class BaseTextAndGraphicManager extends BaseSubManager {
private static final String TAG = "TextAndGraphicManager";
boolean isDirty, hasQueuedUpdate;
volatile Show inProgressUpdate;
Show currentScreenData, queuedImageUpdate;
HMILevel currentHMILevel;
protected DisplayCapabilities displayCapabilities;
private boolean pendingHMIFull, batchingUpdates;
private final WeakReference<FileManager> fileManager;
private final WeakReference<SoftButtonManager> softButtonManager;
private CompletionListener queuedUpdateListener, inProgressListener, pendingHMIListener;
SdlArtwork blankArtwork;
private OnRPCNotificationListener hmiListener;
private OnSystemCapabilityListener onDisplayCapabilitiesListener;
private SdlArtwork primaryGraphic, secondaryGraphic;
private TextAlignment textAlignment;
private String textField1, textField2, textField3, textField4, mediaTrackTextField;
private MetadataType textField1Type, textField2Type, textField3Type, textField4Type;
//Constructors
BaseTextAndGraphicManager(@NonNull ISdl internalInterface, @NonNull FileManager fileManager, @NonNull SoftButtonManager softButtonManager) {
// set class vars
super(internalInterface);
this.fileManager = new WeakReference<>(fileManager);
this.softButtonManager = new WeakReference<>(softButtonManager);
batchingUpdates = false;
isDirty = false;
pendingHMIFull = false;
textAlignment = CENTERED;
currentHMILevel = HMILevel.HMI_NONE;
currentScreenData = new Show();
addListeners();
getBlankArtwork();
}
@Override
public void start(CompletionListener listener) {
transitionToState(READY);
super.start(listener);
}
@Override
public void dispose(){
textField1 = null;
textField1Type = null;
textField2 = null;
textField2Type = null;
textField3 = null;
textField3Type = null;
textField4 = null;
textField4Type = null;
mediaTrackTextField = null;
textAlignment = null;
primaryGraphic = null;
secondaryGraphic = null;
blankArtwork = null;
displayCapabilities = null;
inProgressUpdate = null;
queuedImageUpdate = null;
currentScreenData = null;
queuedUpdateListener = null;
pendingHMIListener = null;
inProgressListener = null;
hasQueuedUpdate = false;
isDirty = false;
pendingHMIFull = false;
// remove listeners
internalInterface.removeOnRPCNotificationListener(FunctionID.ON_HMI_STATUS, hmiListener);
internalInterface.removeOnSystemCapabilityListener(SystemCapabilityType.DISPLAY, onDisplayCapabilitiesListener);
super.dispose();
}
private void addListeners() {
// add listener
hmiListener = new OnRPCNotificationListener() {
@Override
public void onNotified(RPCNotification notification) {
currentHMILevel = ((OnHMIStatus)notification).getHmiLevel();
if (currentHMILevel == HMILevel.HMI_FULL){
if (pendingHMIFull){
DebugTool.logInfo( "Acquired HMI_FULL with pending update. Sending now");
pendingHMIFull = false;
sdlUpdate(pendingHMIListener);
pendingHMIListener = null;
}
}
}
};
internalInterface.addOnRPCNotificationListener(FunctionID.ON_HMI_STATUS, hmiListener);
// Add OnDisplayCapabilitiesListener to keep displayCapabilities updated
onDisplayCapabilitiesListener = new OnSystemCapabilityListener() {
@Override
public void onCapabilityRetrieved(Object capability) {
displayCapabilities = (DisplayCapabilities)capability;
}
@Override
public void onError(String info) {
Log.e(TAG, "DISPLAY Capability cannot be retrieved:");
displayCapabilities = null;
}
};
this.internalInterface.addOnSystemCapabilityListener(SystemCapabilityType.DISPLAY, onDisplayCapabilitiesListener);
}
// Upload / Send
protected void update(CompletionListener listener) {
// check if is batch update
if (batchingUpdates) {
return;
}
if (isDirty){
isDirty = false;
sdlUpdate(listener);
} else if (listener != null) {
listener.onComplete(true);
}
}
private synchronized void sdlUpdate(CompletionListener listener){
// make sure hmi is not none
if (currentHMILevel == null || currentHMILevel == HMILevel.HMI_NONE){
//Trying to send show on HMI_NONE, waiting for full
pendingHMIFull = true;
if (listener != null){
pendingHMIListener = listener;
}
return;
}
//Updating Text and Graphics
if (inProgressUpdate != null){
//In progress update exists, queueing update
if (queuedUpdateListener != null){
//Queued update already exists, superseding previous queued update
queuedUpdateListener.onComplete(false);
queuedUpdateListener = null;
}
if (listener != null){
queuedUpdateListener = listener;
}
hasQueuedUpdate = true;
return;
}
Show fullShow = new Show();
fullShow.setAlignment(textAlignment);
fullShow = assembleShowText(fullShow);
fullShow = assembleShowImages(fullShow);
inProgressListener = listener;
if (!shouldUpdatePrimaryImage() && !shouldUpdateSecondaryImage()){
//No Images to send, only sending text
inProgressUpdate = extractTextFromShow(fullShow);
sendShow();
}else if (!sdlArtworkNeedsUpload(primaryGraphic) && (secondaryGraphic == blankArtwork || !sdlArtworkNeedsUpload(secondaryGraphic))){
//Images already uploaded, sending full update
// The files to be updated are already uploaded, send the full show immediately
inProgressUpdate = fullShow;
sendShow();
} else{
// Images need to be uploaded, sending text and uploading images
inProgressUpdate = fullShow;
final Show thisUpdate = fullShow;
uploadImages(new CompletionListener() {
@Override
public void onComplete(boolean success) {
if (!success){
Log.e(TAG, "Error uploading image");
inProgressUpdate = extractTextFromShow(inProgressUpdate);
sendShow();
}
// Check if queued image update still matches our images (there could have been a new Show in the meantime)
// and send a new update if it does. Since the images will already be on the head unit, the whole show will be sent
if (thisUpdate.getGraphic() != null && thisUpdate.getGraphic().equals(queuedImageUpdate.getGraphic()) ||
(thisUpdate.getSecondaryGraphic() != null && queuedImageUpdate.getSecondaryGraphic() != null) && thisUpdate.getSecondaryGraphic().equals(queuedImageUpdate.getSecondaryGraphic())){
// Queued image update matches the images we need, sending update
sendShow();
}
// Else, Queued image update does not match the images we need, skipping update
}
});
queuedImageUpdate = fullShow;
}
}
private void sendShow(){
inProgressUpdate.setOnRPCResponseListener(new OnRPCResponseListener() {
@Override
public void onResponse(int correlationId, RPCResponse response) {
handleResponse(response.getSuccess());
}
@Override
public void onError(int correlationId, Result resultCode, String info) {
handleResponse(false);
}
private void handleResponse(boolean success){
if (success){
updateCurrentScreenDataState(inProgressUpdate);
}
inProgressUpdate = null;
if (inProgressListener != null){
inProgressListener.onComplete(success);
inProgressListener = null;
}
if (hasQueuedUpdate){
//Queued update exists, sending another update
hasQueuedUpdate = false;
CompletionListener temp = queuedUpdateListener;
queuedUpdateListener = null;
sdlUpdate(temp);
}
}
});
if (this.softButtonManager.get() != null) {
this.softButtonManager.get().setCurrentMainField1(inProgressUpdate.getMainField1());
}
internalInterface.sendRPCRequest(inProgressUpdate);
}
// Images
private void uploadImages(final CompletionListener listener) {
List<SdlArtwork> artworksToUpload = new ArrayList<>();
// add primary image
if (shouldUpdatePrimaryImage() && !primaryGraphic.isStaticIcon()){
artworksToUpload.add(primaryGraphic);
}
// add secondary image
if (shouldUpdateSecondaryImage() && !secondaryGraphic.isStaticIcon()){
artworksToUpload.add(secondaryGraphic);
}
if (artworksToUpload.size() == 0 && ((primaryGraphic != null && primaryGraphic.isStaticIcon()) || (secondaryGraphic != null && secondaryGraphic.isStaticIcon()))){
DebugTool.logInfo("Upload attempted on static icons, sending them without upload instead");
listener.onComplete(true);
// Nothing needs uploading, so don't fall through and invoke the listener a second time.
return;
}
// use file manager to upload art
if (fileManager.get() != null) {
fileManager.get().uploadArtworks(artworksToUpload, new MultipleFileCompletionListener() {
@Override
public void onComplete(Map<String, String> errors) {
if (errors != null) {
Log.e(TAG, "Error Uploading Artworks. Error: " + errors.toString());
listener.onComplete(false);
} else {
listener.onComplete(true);
}
}
});
}
}
private Show assembleShowImages(Show show){
if (shouldUpdatePrimaryImage()){
show.setGraphic(primaryGraphic.getImageRPC());
}
if (shouldUpdateSecondaryImage()){
show.setSecondaryGraphic(secondaryGraphic.getImageRPC());
}
return show;
}
// Text
Show assembleShowText(Show show){
show = setBlankTextFields(show);
if (mediaTrackTextField != null){
show.setMediaTrack(mediaTrackTextField);
}
List<String> nonNullFields = findValidMainTextFields();
if (nonNullFields.isEmpty()){
return show;
}
int numberOfLines = getNumberOfLines();
switch (numberOfLines) {
case 1: show = assembleOneLineShowText(show, nonNullFields);
break;
case 2: show = assembleTwoLineShowText(show);
break;
case 3: show = assembleThreeLineShowText(show);
break;
case 4: show = assembleFourLineShowText(show);
break;
}
return show;
}
private Show assembleOneLineShowText(Show show, List<String> showFields){
StringBuilder showString1 = new StringBuilder();
for (int i = 0; i < showFields.size(); i++) {
if (i > 0) {
showString1.append(" - ").append(showFields.get(i));
}else{
showString1.append(showFields.get(i));
}
}
show.setMainField1(showString1.toString());
MetadataTags tags = new MetadataTags();
tags.setMainField1(findNonNullMetadataFields());
show.setMetadataTags(tags);
return show;
}
private Show assembleTwoLineShowText(Show show){
StringBuilder tempString = new StringBuilder();
MetadataTags tags = new MetadataTags();
if (textField1 != null && textField1.length() > 0) {
tempString.append(textField1);
if (textField1Type != null){
tags.setMainField1(textField1Type);
}
}
if (textField2 != null && textField2.length() > 0) {
if (( textField3 == null || !(textField3.length() > 0)) && (textField4 == null || !(textField4.length() > 0))){
// text does not exist in slots 3 or 4, put text2 in slot 2
show.setMainField2(textField2);
if (textField2Type != null){
tags.setMainField2(textField2Type);
}
} else if (textField1 != null && textField1.length() > 0) {
// If text 1 exists, put it in slot 1 formatted
tempString.append(" - ").append(textField2);
if (textField2Type != null){
List<MetadataType> typeList = new ArrayList<>();
typeList.add(textField2Type);
if (textField1Type != null){
typeList.add(textField1Type);
}
tags.setMainField1(typeList);
}
}else {
// If text 1 does not exist, put it in slot 1 unformatted
tempString.append(textField2);
if (textField2Type != null){
tags.setMainField1(textField2Type);
}
}
}
// set mainfield 1
show.setMainField1(tempString.toString());
// new stringbuilder object
tempString = new StringBuilder();
if (textField3 != null && textField3.length() > 0){
// If text 3 exists, put it in slot 2
tempString.append(textField3);
if (textField3Type != null){
List<MetadataType> typeList = new ArrayList<>();
typeList.add(textField3Type);
tags.setMainField2(typeList);
}
}
if (textField4 != null && textField4.length() > 0){
if (textField3 != null && textField3.length() > 0){
// If text 3 exists, put it in slot 2 formatted
tempString.append(" - ").append(textField4);
if (textField4Type != null){
List<MetadataType> typeList = new ArrayList<>();
typeList.add(textField4Type);
if (textField3Type != null){
typeList.add(textField3Type);
}
tags.setMainField2(typeList);
}
} else {
// If text 3 does not exist, put it in slot 3 unformatted
tempString.append(textField4);
if (textField4Type != null){
tags.setMainField2(textField4Type);
}
}
}
if (tempString.toString().length() > 0){
show.setMainField2(tempString.toString());
}
show.setMetadataTags(tags);
return show;
}
private Show assembleThreeLineShowText(Show show){
MetadataTags tags = new MetadataTags();
if (textField1 != null && textField1.length() > 0) {
show.setMainField1(textField1);
if (textField1Type != null){
tags.setMainField1(textField1Type);
}
}
if (textField2 != null && textField2.length() > 0) {
show.setMainField2(textField2);
if (textField2Type != null){
tags.setMainField2(textField2Type);
}
}
StringBuilder tempString = new StringBuilder();
if (textField3 != null && textField3.length() > 0){
tempString.append(textField3);
if (textField3Type != null){
tags.setMainField3(textField3Type);
}
}
if (textField4 != null && textField4.length() > 0) {
if (textField3 != null && textField3.length() > 0) {
// If text 3 exists, put it in slot 3 formatted
tempString.append(" - ").append(textField4);
if (textField4Type != null){
List<MetadataType> tags4 = new ArrayList<>();
if (textField3Type != null){
tags4.add(textField3Type);
}
tags4.add(textField4Type);
tags.setMainField3(tags4);
}
} else {
// If text 3 does not exist, put it in slot 3 formatted
tempString.append(textField4);
if (textField4Type != null){
tags.setMainField3(textField4Type);
}
}
}
show.setMainField3(tempString.toString());
show.setMetadataTags(tags);
return show;
}
private Show assembleFourLineShowText(Show show){
MetadataTags tags = new MetadataTags();
if (textField1 != null && textField1.length() > 0) {
show.setMainField1(textField1);
if (textField1Type != null){
tags.setMainField1(textField1Type);
}
}
if (textField2 != null && textField2.length() > 0) {
show.setMainField2(textField2);
if (textField2Type != null){
tags.setMainField2(textField2Type);
}
}
if (textField3 != null && textField3.length() > 0) {
show.setMainField3(textField3);
if (textField3Type != null){
tags.setMainField3(textField3Type);
}
}
if (textField4 != null && textField4.length() > 0) {
show.setMainField4(textField4);
if (textField4Type != null){
tags.setMainField4(textField4Type);
}
}
show.setMetadataTags(tags);
return show;
}
// Extraction
Show extractTextFromShow(Show show){
Show newShow = new Show();
newShow.setMainField1(show.getMainField1());
newShow.setMainField2(show.getMainField2());
newShow.setMainField3(show.getMainField3());
newShow.setMainField4(show.getMainField4());
return newShow;
}
private Show setBlankTextFields(Show newShow){
newShow.setMainField1("");
newShow.setMainField2("");
newShow.setMainField3("");
newShow.setMainField4("");
newShow.setMediaTrack("");
return newShow;
}
private void updateCurrentScreenDataState(Show show){
if (show == null){
Log.e(TAG, "can not updateCurrentScreenDataFromShow from null show");
return;
}
// If the items are null, they were not updated, so we can't just set it directly
if (show.getMainField1() != null){
currentScreenData.setMainField1(show.getMainField1());
}
if (show.getMainField2() != null){
currentScreenData.setMainField2(show.getMainField2());
}
if (show.getMainField3() != null){
currentScreenData.setMainField3(show.getMainField3());
}
if (show.getMainField4() != null){
currentScreenData.setMainField4(show.getMainField4());
}
if (show.getMediaTrack() != null){
currentScreenData.setMediaTrack(show.getMediaTrack());
}
if (show.getMetadataTags() != null){
currentScreenData.setMetadataTags(show.getMetadataTags());
}
if (show.getAlignment() != null){
currentScreenData.setAlignment(show.getAlignment());
}
if (show.getGraphic() != null){
currentScreenData.setGraphic(show.getGraphic());
}
if (show.getSecondaryGraphic() != null){
currentScreenData.setSecondaryGraphic(show.getSecondaryGraphic());
}
}
// Helpers
private List<String> findValidMainTextFields(){
List<String> array = new ArrayList<>();
if (textField1 != null && textField1.length() > 0) {
array.add(textField1);
}
if (textField2 != null && textField2.length() > 0) {
array.add(textField2);
}
if (textField3 != null && textField3.length() > 0) {
array.add(textField3);
}
if (textField4 != null && textField4.length() > 0) {
array.add(textField4);
}
return array;
}
private List<MetadataType> findNonNullMetadataFields(){
List<MetadataType> array = new ArrayList<>();
if (textField1Type != null) {
array.add(textField1Type);
}
if (textField2Type != null) {
array.add(textField2Type);
}
if (textField3Type != null) {
array.add(textField3Type);
}
if (textField4Type != null) {
array.add(textField4Type);
}
return array;
}
abstract SdlArtwork getBlankArtwork();
private boolean sdlArtworkNeedsUpload(SdlArtwork artwork){
if (fileManager.get() != null) {
return artwork != null && !fileManager.get().hasUploadedFile(artwork) && !artwork.isStaticIcon();
}
return false;
}
private boolean shouldUpdatePrimaryImage() {
if (displayCapabilities == null || displayCapabilities.getGraphicSupported()) {
if (currentScreenData.getGraphic() == null && primaryGraphic != null) {
return true;
} else if (currentScreenData.getGraphic() == null && primaryGraphic == null) {
return false;
}
return currentScreenData != null && (primaryGraphic != null && !currentScreenData.getGraphic().getValue().equalsIgnoreCase(primaryGraphic.getName()));
}
return false;
}
private boolean shouldUpdateSecondaryImage() {
// Cannot detect if there is a secondary image, so we'll just try to detect if there's a primary image and allow it if there is.
if (displayCapabilities == null || displayCapabilities.getGraphicSupported()) {
if (currentScreenData.getGraphic() == null && secondaryGraphic != null) {
return true;
} else if (currentScreenData.getGraphic() == null && secondaryGraphic == null) {
return false;
}
return currentScreenData != null && (secondaryGraphic != null && !currentScreenData.getGraphic().getValue().equalsIgnoreCase(secondaryGraphic.getName()));
}
return false;
}
int getNumberOfLines() {
if (displayCapabilities == null){
return 4;
}
int linesFound = 0;
List<TextField> textFields = displayCapabilities.getTextFields();
TextFieldName name;
for (TextField field : textFields) {
if (field.getName() != null) {
name = field.getName();
if (name == TextFieldName.mainField1 || name == TextFieldName.mainField2 || name == TextFieldName.mainField3 || name == TextFieldName.mainField4) {
linesFound += 1;
}
}
}
return linesFound;
}
// SCREEN ITEM SETTERS AND GETTERS
void setTextAlignment(TextAlignment textAlignment){
this.textAlignment = textAlignment;
// If we aren't batching, send the update immediately, if we are, set ourselves as dirty (so we know we should send an update after the batch ends)
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
TextAlignment getTextAlignment(){
return textAlignment;
}
void setMediaTrackTextField(String mediaTrackTextField){
this.mediaTrackTextField = mediaTrackTextField;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
String getMediaTrackTextField(){
return mediaTrackTextField;
}
void setTextField1(String textField1){
this.textField1 = textField1;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
String getTextField1(){
return textField1;
}
void setTextField2(String textField2){
this.textField2 = textField2;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
String getTextField2(){
return textField2;
}
void setTextField3(String textField3){
this.textField3 = textField3;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
String getTextField3(){
return textField3;
}
void setTextField4(String textField4){
this.textField4 = textField4;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
String getTextField4(){
return textField4;
}
void setTextField1Type(MetadataType textField1Type){
this.textField1Type = textField1Type;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
MetadataType getTextField1Type(){
return textField1Type;
}
void setTextField2Type(MetadataType textField2Type){
this.textField2Type = textField2Type;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
MetadataType getTextField2Type(){
return textField2Type;
}
void setTextField3Type(MetadataType textField3Type){
this.textField3Type = textField3Type;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
MetadataType getTextField3Type(){
return textField3Type;
}
void setTextField4Type(MetadataType textField4Type){
this.textField4Type = textField4Type;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
MetadataType getTextField4Type(){
return textField4Type;
}
void setPrimaryGraphic(SdlArtwork primaryGraphic){
this.primaryGraphic = primaryGraphic;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
SdlArtwork getPrimaryGraphic(){
return primaryGraphic;
}
void setSecondaryGraphic(SdlArtwork secondaryGraphic){
this.secondaryGraphic = secondaryGraphic;
if (!batchingUpdates){
sdlUpdate(null);
}else{
isDirty = true;
}
}
SdlArtwork getSecondaryGraphic(){
return secondaryGraphic;
}
void setBatchUpdates(boolean batching){
this.batchingUpdates = batching;
}
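/*
 * Usage sketch for the batching flag (illustrative only; the concrete manager instance
 * and how it is obtained are assumptions, not part of this class):
 *
 *   manager.setBatchUpdates(true);        // setters below only mark the manager dirty
 *   manager.setTextField1("Track title");
 *   manager.setTextField2("Artist");
 *   manager.setBatchUpdates(false);
 *
 * Note that, as implemented above, setBatchUpdates(false) does not itself call
 * sdlUpdate(); batched changes are only flagged via isDirty.
 */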
}
|
package org.cytoscape.biopax.internal.action;
import static org.cytoscape.biopax.internal.BioPaxMapper.BIOPAX_ENTITY_TYPE;
import java.util.Iterator;
import javax.swing.SwingUtilities;
import org.cytoscape.application.CyApplicationManager;
import org.cytoscape.application.events.SetCurrentNetworkViewEvent;
import org.cytoscape.application.events.SetCurrentNetworkViewListener;
import org.cytoscape.biopax.internal.util.BioPaxUtil;
import org.cytoscape.biopax.internal.util.BioPaxVisualStyleUtil;
import org.cytoscape.biopax.internal.view.BioPaxContainer;
import org.cytoscape.biopax.internal.view.BioPaxDetailsPanel;
import org.cytoscape.model.CyNetwork;
import org.cytoscape.model.CyNode;
import org.cytoscape.model.CyRow;
import org.cytoscape.model.events.RowsSetEvent;
import org.cytoscape.model.events.RowsSetListener;
import org.cytoscape.view.model.CyNetworkView;
import org.cytoscape.view.model.View;
import org.cytoscape.view.model.events.NetworkViewAboutToBeDestroyedEvent;
import org.cytoscape.view.model.events.NetworkViewAboutToBeDestroyedListener;
import org.cytoscape.view.model.events.NetworkViewAddedEvent;
import org.cytoscape.view.model.events.NetworkViewAddedListener;
import org.cytoscape.view.presentation.property.BasicVisualLexicon;
import org.cytoscape.view.vizmap.VisualMappingManager;
import org.cytoscape.view.vizmap.VisualStyle;
/**
* Listens for Network Events, and takes appropriate Actions.
* May be subclassed.
*
* @author Ethan Cerami, Gary Bader, Chris Sander, Benjamin Gross, Igor Rodchenkov.
*/
public class BioPaxViewTracker implements NetworkViewAddedListener,
NetworkViewAboutToBeDestroyedListener, SetCurrentNetworkViewListener, RowsSetListener {
private final BioPaxDetailsPanel bpPanel;
private final BioPaxContainer bpContainer;
private final CyApplicationManager cyApplicationManager;
private final VisualMappingManager visualMappingManager;
private final BioPaxVisualStyleUtil bioPaxVisualStyleUtil;
/**
* Constructor.
*
* @param bpPanel BioPaxDetails Panel Object.
*/
public BioPaxViewTracker(BioPaxDetailsPanel bpPanel,
BioPaxContainer bpContainer,
CyApplicationManager cyApplicationManager,
VisualMappingManager visualMappingManager,
BioPaxVisualStyleUtil bioPaxVisualStyleUtil)
{
this.bpPanel = bpPanel;
this.bpContainer = bpContainer;
this.cyApplicationManager = cyApplicationManager;
this.visualMappingManager = visualMappingManager;
this.bioPaxVisualStyleUtil = bioPaxVisualStyleUtil;
}
/**
* Network Created Event
*/
@Override
public void handleEvent(NetworkViewAddedEvent e) {
final CyNetworkView view = e.getNetworkView();
if(BioPaxUtil.isBioPAXNetwork(view.getModel())) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
bpContainer.showLegend();
bpPanel.resetText();
// apply BioPAX visual style and set tool tips
setNodeToolTips(view);
VisualStyle bioPaxVisualStyle = bioPaxVisualStyleUtil.getBioPaxVisualStyle();
visualMappingManager.setVisualStyle(bioPaxVisualStyle, view);
bioPaxVisualStyle.apply(view);
view.updateView();
}
});
}
}
/**
* Network Focus Event.
*/
@Override
public void handleEvent(SetCurrentNetworkViewEvent e) {
CyNetworkView view = e.getNetworkView();
// update bpPanel accordingly
if (view != null && BioPaxUtil.isBioPAXNetwork(view.getModel())) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
bpPanel.resetText();
}
});
}
}
@Override
public void handleEvent(NetworkViewAboutToBeDestroyedEvent e) {
if (BioPaxUtil.isBioPAXNetwork(e.getNetworkView().getModel())) {
//TODO nothing?
}
}
@Override
public void handleEvent(RowsSetEvent e) {
CyNetworkView view = cyApplicationManager.getCurrentNetworkView();
if(view == null) return;
final CyNetwork network = view.getModel();
if (BioPaxUtil.isBioPAXNetwork(network)) {
if (!network.getDefaultNodeTable().equals(e.getSource()))
return;
try {
CyNode selected = null;
for (CyNode node : network.getNodeList()) {
if (network.getRow(node).get(CyNetwork.SELECTED, Boolean.class)) {
selected = node;
break;
}
}
if (selected != null) {
final CyNode node = selected;
SwingUtilities.invokeLater(new Runnable() {
public void run() {
// Show the details
bpPanel.showDetails(network, node);
// If legend is showing, show details
bpContainer.showDetails();
}
});
}
} finally {
// update custom nodes
customNodes(view);
}
}
}
private void setNodeToolTips(CyNetworkView networkView) {
// iterate through the nodes
CyNetwork network = networkView.getModel();
for (CyNode node : network.getNodeList()) {
CyRow row = network.getRow(node);
String tip = row.get(BIOPAX_ENTITY_TYPE, String.class) + "\n"
+ row.get("/cellularLocation", String.class);
View<CyNode> nodeView = networkView.getNodeView(node);
nodeView.setLockedValue(BasicVisualLexicon.NODE_TOOLTIP, tip);
}
}
private static void customNodes(CyNetworkView networkView) {
// grab node attributes
CyNetwork cyNetwork = networkView.getModel();
// iterate through the nodes
Iterator<CyNode> nodesIt = cyNetwork.getNodeList().iterator();
while (nodesIt.hasNext()) {
// grab the node
CyNode node = nodesIt.next();
// get chemical modifications
int count = 0;
boolean isPhosphorylated = false;
// TODO: MultiHashMap
// MultiHashMapDefinition mhmdef = nodeAttributes.getMultiHashMapDefinition();
// if (mhmdef.getAttributeValueType(BIOPAX_CHEMICAL_MODIFICATIONS_MAP) != -1) {
// MultiHashMap mhmap = nodeAttributes.getMultiHashMap();
// CountedIterator modsIt = mhmap.getAttributeKeyspan(node.getIdentifier(),
// BIOPAX_CHEMICAL_MODIFICATIONS_MAP, null);
// // do we have phosphorylation ?
// while (modsIt.hasNext()) {
// String modification = (String) modsIt.next();
// if (modification.equals(BioPaxUtil.PHOSPHORYLATION_SITE)) {
// isPhosphorylated = true;
// Object[] key = { BioPaxUtil.PHOSPHORYLATION_SITE };
// String countStr = (String) mhmap.getAttributeValue(node.getIdentifier(),
// BIOPAX_CHEMICAL_MODIFICATIONS_MAP, key);
// count = ((Integer) Integer.valueOf(countStr)).intValue();
// break;
// if phosphorylated, add custom node
if (isPhosphorylated) {
addCustomShapes(networkView, node, "PHOSPHORYLATION_GRAPHICS", count);
}
}
}
/**
* Based on given arguments, adds proper custom node shape to node.
*/
private static void addCustomShapes(CyNetworkView networkView, CyNode node, String shapeType,
int modificationCount) {
// TODO: Custom graphics
// // create refs to help views
// CyNetwork cyNetwork = networkView.getModel();
// View<CyNode> nodeView = networkView.getNodeView(node);
// DNodeView dingNodeView = (DNodeView) nodeView;
// // remove existing custom nodes
// Iterator<CustomGraphic> it = dingNodeView.customGraphicIterator();
// while ( it.hasNext() ) {
// dingNodeView.removeCustomGraphic( it.next() );
// for (int lc = 0; lc < modificationCount; lc++) {
// // set image
// BufferedImage image = null;
// if (shapeType.equals(PHOSPHORYLATION_GRAPHICS)) {
// image = (cyNetwork.isSelected(node)) ? customPhosGraphics[lc] : phosNode;
// // set rect
// Rectangle2D rect = getCustomShapeRect(image, lc);
// // create our texture paint
// Paint paint = null;
// try {
// paint = new java.awt.TexturePaint(image, rect);
// } catch (Exception exc) {
// paint = java.awt.Color.black;
// // add the graphic
// dingNodeView.addCustomGraphic(rect, paint, NodeDetails.ANCHOR_CENTER);
}
}
|
package com.cee.news.client.workingset;
import java.util.List;
import com.cee.news.client.list.AddRemoveListModel;
import com.cee.news.client.list.ListPanel;
import com.cee.news.client.list.SelectionListEditor;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.editor.client.Editor;
import com.google.gwt.editor.client.IsEditor;
import com.google.gwt.editor.client.adapters.SimpleEditor;
import com.google.gwt.editor.ui.client.adapters.ValueBoxEditor;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.event.dom.client.HasClickHandlers;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.DialogBox;
import com.google.gwt.user.client.ui.HasText;
import com.google.gwt.user.client.ui.InlineLabel;
import com.google.gwt.user.client.ui.LayoutPanel;
import com.google.gwt.user.client.ui.TextBox;
public class WorkingSetEditor extends DialogBox implements Editor<WorkingSetData>, WorkingSetView {
private InlineLabel labelErrorMessage;
private TextBox newNameEditor;
private SimpleEditor<String> oldNameEditor;
private SimpleEditor<Boolean> isNewEditor;
private SelectionListEditor sitesEditor;
private Button buttonSave;
private Button buttonCancel;
private Button buttonAddNewSite;
public WorkingSetEditor(final AddRemoveListModel sitesModel) {
setText("Edit Working Set");
LayoutPanel layoutPanel = new LayoutPanel();
setWidget(layoutPanel);
layoutPanel.setSize("627px", "498px");
InlineLabel nlnlblNewInlinelabel = new InlineLabel("Working Set Name:");
layoutPanel.add(nlnlblNewInlinelabel);
layoutPanel.setWidgetLeftWidth(nlnlblNewInlinelabel, 0.0, Unit.PX, 121.0, Unit.PX);
layoutPanel.setWidgetTopHeight(nlnlblNewInlinelabel, 0.0, Unit.PX, 28.0, Unit.PX);
newNameEditor = new TextBox();
layoutPanel.add(newNameEditor);
layoutPanel.setWidgetLeftRight(newNameEditor, 127.0, Unit.PX, 0.0, Unit.PX);
layoutPanel.setWidgetTopHeight(newNameEditor, 0.0, Unit.PX, 28.0, Unit.PX);
ListPanel listPanelSites = new ListPanel();
layoutPanel.add(listPanelSites);
layoutPanel.setWidgetLeftWidth(listPanelSites, 0.0, Unit.PX, 50.0, Unit.PCT);
layoutPanel.setWidgetTopBottom(listPanelSites, 65.0, Unit.PX, 86.0, Unit.PX);
InlineLabel nlnlblAvailableSites = new InlineLabel("Available Sites:");
layoutPanel.add(nlnlblAvailableSites);
layoutPanel.setWidgetLeftWidth(nlnlblAvailableSites, 0.0, Unit.PX, 90.0, Unit.PX);
layoutPanel.setWidgetTopHeight(nlnlblAvailableSites, 34.0, Unit.PX, 24.0, Unit.PX);
ListPanel listPanelSelectedSites = new ListPanel();
layoutPanel.add(listPanelSelectedSites);
layoutPanel.setWidgetRightWidth(listPanelSelectedSites, 0.0, Unit.PX, 50.0, Unit.PCT);
layoutPanel.setWidgetTopBottom(listPanelSelectedSites, 65.0, Unit.PX, 86.0, Unit.PX);
InlineLabel nlnlblSelectedSites = new InlineLabel("Selected Sites:");
layoutPanel.add(nlnlblSelectedSites);
layoutPanel.setWidgetLeftWidth(nlnlblSelectedSites, 50.0, Unit.PCT, 108.0, Unit.PX);
layoutPanel.setWidgetTopHeight(nlnlblSelectedSites, 34.0, Unit.PX, 24.0, Unit.PX);
Button buttonRemoveAllSites = new Button("Remove All");
layoutPanel.add(buttonRemoveAllSites);
layoutPanel.setWidgetRightWidth(buttonRemoveAllSites, 0.0, Unit.PX, 130.0, Unit.PX);
layoutPanel.setWidgetBottomHeight(buttonRemoveAllSites, 56.0, Unit.PX, 24.0, Unit.PX);
buttonRemoveAllSites.addClickHandler(new ClickHandler() {
@Override
public void onClick(ClickEvent event) {
sitesModel.clearSelection();
}
});
buttonAddNewSite = new Button("Add New Site");
layoutPanel.add(buttonAddNewSite);
layoutPanel.setWidgetRightWidth(buttonAddNewSite, 50.0, Unit.PCT, 130.0, Unit.PX);
layoutPanel.setWidgetBottomHeight(buttonAddNewSite, 56.0, Unit.PX, 24.0, Unit.PX);
buttonSave = new Button("Save");
layoutPanel.add(buttonSave);
layoutPanel.setWidgetRightWidth(buttonSave, 0.0, Unit.PX, 130.0, Unit.PX);
layoutPanel.setWidgetBottomHeight(buttonSave, 0.0, Unit.PX, 24.0, Unit.PX);
buttonCancel = new Button("Cancel");
buttonCancel.setText("Cancel");
layoutPanel.add(buttonCancel);
layoutPanel.setWidgetRightWidth(buttonCancel, 136.0, Unit.PX, 130.0, Unit.PX);
layoutPanel.setWidgetBottomHeight(buttonCancel, 0.0, Unit.PX, 24.0, Unit.PX);
labelErrorMessage = new InlineLabel("");
layoutPanel.add(labelErrorMessage);
layoutPanel.setWidgetLeftRight(labelErrorMessage, 0.0, Unit.PX, 0.0, Unit.PX);
layoutPanel.setWidgetBottomHeight(labelErrorMessage, 31.0, Unit.PX, 19.0, Unit.PX);
sitesEditor = new SelectionListEditor(sitesModel);
oldNameEditor = SimpleEditor.of();
isNewEditor = SimpleEditor.of();
}
public IsEditor<ValueBoxEditor<String>> newName() {
return newNameEditor;
}
public SimpleEditor<String> oldName() {
return oldNameEditor;
}
public SimpleEditor<Boolean> isNew() {
return isNewEditor;
}
public Editor<List<String>> sites() {
return sitesEditor;
}
@Override
public HasClickHandlers getButtonSave() {
return buttonSave;
}
@Override
public HasClickHandlers getButtonCancel() {
return buttonCancel;
}
@Override
public HasClickHandlers getButtonAddNewSite() {
return buttonAddNewSite;
}
@Override
public HasText getErrorText() {
return labelErrorMessage;
}
}
|
package ch.elexis.core.ui.preferences;
import java.util.List;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.core.databinding.beans.PojoProperties;
import org.eclipse.core.databinding.observable.value.IObservableValue;
import org.eclipse.core.databinding.observable.value.IValueChangeListener;
import org.eclipse.core.databinding.observable.value.ValueChangeEvent;
import org.eclipse.core.databinding.observable.value.WritableValue;
import org.eclipse.jface.databinding.swt.WidgetProperties;
import org.eclipse.jface.layout.TableColumnLayout;
import org.eclipse.jface.preference.PreferencePage;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.CellLabelProvider;
import org.eclipse.jface.viewers.CheckboxTableViewer;
import org.eclipse.jface.viewers.ColumnWeightData;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.TableViewerColumn;
import org.eclipse.jface.viewers.ViewerCell;
import org.eclipse.jface.window.Window;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.MenuAdapter;
import org.eclipse.swt.events.MenuEvent;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Color;
import org.eclipse.swt.graphics.RGB;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.ColorDialog;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Group;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Link;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPreferencePage;
import ch.elexis.admin.AccessControlDefaults;
import ch.elexis.core.data.activator.CoreHub;
import ch.elexis.core.ui.UiDesk;
import ch.elexis.core.ui.coolbar.MandantSelectionContributionItem;
import ch.elexis.core.ui.data.UiMandant;
import ch.elexis.core.ui.dialogs.KontaktSelektor;
import ch.elexis.core.ui.icons.Images;
import ch.elexis.core.ui.preferences.inputs.PrefAccessDenied;
import ch.elexis.core.ui.util.SWTHelper;
import ch.elexis.core.ui.util.viewers.DefaultLabelProvider;
import ch.elexis.data.Anwender;
import ch.elexis.data.Kontakt;
import ch.elexis.data.Mandant;
import ch.elexis.data.Person;
import ch.elexis.data.Query;
import ch.elexis.data.Rechnungssteller;
import ch.elexis.data.Role;
import ch.elexis.data.User;
public class UserManagementPreferencePage extends PreferencePage
implements IWorkbenchPreferencePage {
private DataBindingContext m_bindingContext;
private TableViewer tableViewerUsers;
private WritableValue wvUser = new WritableValue(null, User.class);
private WritableValue wvAnwender = new WritableValue(null, Anwender.class);
private Text txtUsername;
private Button btnIsExecutiveDoctor;
private Label lblRespPhysColor;
private Group grpAccounting;
public static final String CHANGE_LINK = "<a>ändern</a>";
private Link linkContact;
private Text txtPassword;
private Text txtPassword2;
private CheckboxTableViewer checkboxTableViewerAssociation;
private CheckboxTableViewer checkboxTableViewerRoles;
private Link linkChangePassword;
private Button btnUserIsAdmin;
private Color lblRespPhysColorDefColor;
private Link linkRechnungssteller;
private MenuItem addUserMenuItem;
private MenuItem deleteUserMenuItem;
/**
* Create the preference page.
*/
public UserManagementPreferencePage(){
setTitle("Benutzerverwaltung");
noDefaultAndApplyButton();
}
/**
* Create contents of the preference page.
*
* @param parent
*/
@Override
public Control createContents(Composite parent){
if (!CoreHub.acl.request(AccessControlDefaults.ACL_USERS)) {
return new PrefAccessDenied(parent);
}
Composite container = new Composite(parent, SWT.NULL);
container.setLayout(new GridLayout(3, false));
Composite compositeSelectorTable = new Composite(container, SWT.NONE);
GridData gd_compositeSelectorTable = new GridData(SWT.LEFT, SWT.FILL, false, true, 1, 1);
gd_compositeSelectorTable.widthHint = 130;
compositeSelectorTable.setLayoutData(gd_compositeSelectorTable);
TableColumnLayout tcl_compositeSelectorTable = new TableColumnLayout();
compositeSelectorTable.setLayout(tcl_compositeSelectorTable);
tableViewerUsers = new TableViewer(compositeSelectorTable, SWT.BORDER | SWT.FULL_SELECTION);
tableViewerUsers.setContentProvider(ArrayContentProvider.getInstance());
Table tableUsers = tableViewerUsers.getTable();
tableUsers.setLinesVisible(true);
tableViewerUsers.addSelectionChangedListener(e -> {
StructuredSelection ss = (StructuredSelection) e.getSelection();
wvUser.setValue(ss == null ? null : ss.getFirstElement());
});
TableViewerColumn tableViewerColumnName = new TableViewerColumn(tableViewerUsers, SWT.NONE);
TableColumn tblclmnName = tableViewerColumnName.getColumn();
tcl_compositeSelectorTable.setColumnData(tblclmnName, new ColumnWeightData(100));
tableViewerColumnName.setLabelProvider(new AnwenderCellLabelProvider());
Menu tableUsersMenu = new Menu(tableUsers);
tableUsers.setMenu(tableUsersMenu);
tableUsersMenu.addMenuListener(new MenuAdapter() {
@Override
public void menuShown(MenuEvent e){
StructuredSelection ss = (StructuredSelection) tableViewerUsers.getSelection();
deleteUserMenuItem.setEnabled(!ss.isEmpty());
}
});
addUserMenuItem = new MenuItem(tableUsersMenu, SWT.NONE);
addUserMenuItem.setText("Benutzer hinzufügen");
addUserMenuItem.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e){
User newUser = new User(null, null, "");
updateUserList();
tableViewerUsers.setSelection(new StructuredSelection(newUser));
}
});
deleteUserMenuItem = new MenuItem(tableUsersMenu, SWT.NONE);
deleteUserMenuItem.setText("Benutzer löschen");
deleteUserMenuItem.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e){
StructuredSelection ss = (StructuredSelection) tableViewerUsers.getSelection();
User u = (User) ss.getFirstElement();
u.delete();
updateUserList();
}
});
Composite compositeEdit = new Composite(container, SWT.NONE);
GridLayout gl_compositeEdit = new GridLayout(2, false);
gl_compositeEdit.marginHeight = 0;
gl_compositeEdit.marginWidth = 0;
compositeEdit.setLayout(gl_compositeEdit);
compositeEdit.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
Group grpSysAccess = new Group(compositeEdit, SWT.NONE);
grpSysAccess.setText("Systemzugang");
grpSysAccess.setLayout(new GridLayout(4, false));
grpSysAccess.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 2, 1));
Label lblBenutzername = new Label(grpSysAccess, SWT.NONE);
lblBenutzername.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
lblBenutzername.setText("Benutzername");
Composite compUsername = new Composite(grpSysAccess, SWT.NONE);
compUsername.setLayout(new FillLayout(SWT.HORIZONTAL));
compUsername.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1));
txtUsername = new Text(compUsername, SWT.BORDER);
Link linkChangeUsername = new Link(compUsername, SWT.NONE);
linkChangeUsername.setText(CHANGE_LINK);
linkChangeUsername.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e){
String newUsername = txtUsername.getText();
boolean isFree = User.verifyUsernameNotTaken(newUsername);
if (isFree) {
setErrorMessage(null);
User u = (User) wvUser.getValue();
User changedUser = u.setUsername(newUsername);
updateUserList();
tableViewerUsers.setSelection(new StructuredSelection(changedUser));
} else {
setErrorMessage("Dieser Username ist bereits vergeben.");
}
}
});
btnUserIsAdmin = new Button(grpSysAccess, SWT.CHECK);
btnUserIsAdmin.setToolTipText("Administratoren unterliegen keinerlei Beschränkungen.");
btnUserIsAdmin.setText("Administrator");
Button btnUserIsLocked = new Button(grpSysAccess, SWT.CHECK);
btnUserIsLocked.setToolTipText("Sperrt die Möglichkeit sich am System anzumelden.");
btnUserIsLocked.setText("Gesperrt");
Label lblPasswort = new Label(grpSysAccess, SWT.NONE);
lblPasswort.setText("Passwort");
Composite composite = new Composite(grpSysAccess, SWT.NONE);
composite.setLayout(new FillLayout(SWT.HORIZONTAL));
composite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 1, 1));
txtPassword = new Text(composite, SWT.BORDER | SWT.PASSWORD);
txtPassword2 = new Text(composite, SWT.BORDER | SWT.PASSWORD);
txtPassword.setToolTipText("Password hier eingeben. Nur * werden angezeigt");
txtPassword2.setToolTipText("Zur Kontrolle Password hier nochmals eingeben. Nur * werden angezeigt");
linkChangePassword = new Link(grpSysAccess, SWT.NONE);
linkChangePassword.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 2, 1));
linkChangePassword.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e){
String pw1 = txtPassword.getText();
String pw2 = txtPassword2.getText();
if (pw1 != null && pw1.length() > 2 && pw1.equals(pw2)) {
setErrorMessage(null);
User u = (User) wvUser.getValue();
u.setPassword(pw1);
linkChangePassword.setText(CHANGE_LINK + " OK");
} else {
setErrorMessage(
"Passwörter nicht ident, oder Passwort zu kurz (min 3 Zeichen)");
}
}
});
Composite sashComposite = new Composite(compositeEdit, SWT.NONE);
sashComposite.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
sashComposite.setLayout(new FillLayout(SWT.HORIZONTAL));
grpAccounting = new Group(sashComposite, SWT.NONE);
grpAccounting.setText("Verrechnung");
GridLayout gl_grpAccounting = new GridLayout(1, false);
gl_grpAccounting.marginHeight = 0;
grpAccounting.setLayout(gl_grpAccounting);
Composite compositeContact = new Composite(grpAccounting, SWT.NONE);
GridLayout gl_compositeContact = new GridLayout(2, false);
gl_compositeContact.horizontalSpacing = 0;
gl_compositeContact.verticalSpacing = 0;
gl_compositeContact.marginWidth = 0;
gl_compositeContact.marginHeight = 0;
compositeContact.setLayout(gl_compositeContact);
compositeContact.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 1, 1));
Label lblKontakt = new Label(compositeContact, SWT.NONE);
lblKontakt.setText("Kontakt");
linkContact = new Link(compositeContact, SWT.NONE);
linkContact.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
linkContact.setText("nicht gesetzt " + CHANGE_LINK);
linkContact.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e){
User user = (User) wvUser.getValue();
if (user == null)
return;
KontaktSelektor ks =
new KontaktSelektor(UiDesk.getTopShell(), Person.class, "Kontakt auswählen",
"Bitte selektieren Sie den zugeordneten Kontakt", new String[] {});
int ret = ks.open();
if (ret == Window.OK) {
Person p = (Person) ks.getSelection();
user.setAssignedContact(p);
linkContact.setText(p.getPersonalia() + " " + CHANGE_LINK);
}
}
});
btnIsExecutiveDoctor = new Button(grpAccounting, SWT.CHECK);
btnIsExecutiveDoctor.setLayoutData(new GridData(SWT.LEFT, SWT.TOP, false, false, 1, 1));
btnIsExecutiveDoctor.setText("ist verantwortlicher Arzt");
Composite compositeIsRespPhys = new Composite(grpAccounting, SWT.BORDER);
compositeIsRespPhys.setLayoutData(new GridData(SWT.FILL, SWT.FILL, false, false, 1, 1));
GridLayout gl_compositeIsRespPhys = new GridLayout(2, false);
gl_compositeIsRespPhys.marginHeight = 0;
compositeIsRespPhys.setLayout(gl_compositeIsRespPhys);
lblRespPhysColor = new Label(compositeIsRespPhys, SWT.NONE);
lblRespPhysColor.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 2, 1));
lblRespPhysColor.setText("zugeordnete Farbe");
lblRespPhysColorDefColor = lblRespPhysColor.getBackground();
lblRespPhysColor.addMouseListener(new MouseAdapter() {
@Override
public void mouseDown(MouseEvent e){
if (!btnIsExecutiveDoctor.getSelection()) {
return;
}
ColorDialog cd = new ColorDialog(UiDesk.getTopShell());
cd.setRGB(lblRespPhysColor.getBackground().getRGB());
cd.setText(Messages.UserManagementPreferencePage_MandatorColorSelectTitle);
RGB rgb = cd.open();
User user = (User) wvUser.getValue();
Mandant m = Mandant.load(user.getAssignedContactId());
UiMandant.setColorForMandator(m, rgb);
lblRespPhysColor.setBackground(UiMandant.getColorForMandator(m));
}
});
Label lblRechnungssteller = new Label(compositeIsRespPhys, SWT.NONE);
lblRechnungssteller.setText("Rechnungssteller");
linkRechnungssteller = new Link(compositeIsRespPhys, SWT.NONE);
linkRechnungssteller.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, true, false, 1, 1));
linkRechnungssteller.setText("nicht gesetzt " + CHANGE_LINK);
linkRechnungssteller.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e){
User user = (User) wvUser.getValue();
if (user == null)
return;
Anwender ac = user.getAssignedContact();
if (ac == null || !ac.isExecutiveDoctor())
return;
KontaktSelektor ks =
new KontaktSelektor(UiDesk.getTopShell(), Person.class, "Kontakt auswählen",
"Bitte selektieren Sie den zugeordneten Kontakt", new String[] {});
int ret = ks.open();
if (ret == Window.OK) {
Kontakt kontakt = (Kontakt) ks.getSelection();
if (kontakt == null)
return;
Mandant mand = Mandant.load(ac.getId());
mand.setRechnungssteller(kontakt);
linkRechnungssteller
.setText(mand.getRechnungssteller().getLabel() + " " + CHANGE_LINK);
}
}
});
Label lblFrVerantwortlichenArzt = new Label(grpAccounting, SWT.NONE);
lblFrVerantwortlichenArzt
.setLayoutData(new GridData(SWT.LEFT, SWT.CENTER, false, false, 2, 1));
lblFrVerantwortlichenArzt.setText("tätig für");
Composite compositeAssociation = new Composite(grpAccounting, SWT.NONE);
compositeAssociation.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1));
TableColumnLayout tcl_compositeAssociation = new TableColumnLayout();
compositeAssociation.setLayout(tcl_compositeAssociation);
checkboxTableViewerAssociation =
CheckboxTableViewer.newCheckList(compositeAssociation, SWT.BORDER | SWT.FULL_SELECTION);
checkboxTableViewerAssociation.addCheckStateListener((e) -> {
Mandant m = (Mandant) e.getElement();
if (m == null)
return;
User user = (User) wvUser.getValue();
Anwender anw = user.getAssignedContact();
if(anw!=null) {
anw.addOrRemoveExecutiveDoctorWorkingFor(m, e.getChecked());
} else {
SWTHelper.showError("No contact assigned", "There is no contact assigned to user "+user.getLabel());
}
});
Group grpRoles = new Group(sashComposite, SWT.NONE);
grpRoles.setText("Rollenzuordnung");
GridLayout gl_grpRoles = new GridLayout(2, false);
gl_grpRoles.marginHeight = 0;
grpRoles.setLayout(gl_grpRoles);
Composite compositeRoles = new Composite(grpRoles, SWT.NONE);
compositeRoles.setLayout(new FillLayout(SWT.HORIZONTAL));
compositeRoles.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1));
checkboxTableViewerRoles =
CheckboxTableViewer.newCheckList(compositeRoles, SWT.BORDER | SWT.FULL_SELECTION);
new Label(compositeEdit, SWT.NONE);
new Label(container, SWT.NONE);
checkboxTableViewerRoles.setContentProvider(ArrayContentProvider.getInstance());
checkboxTableViewerRoles.setLabelProvider(new DefaultLabelProvider() {
@Override
public String getColumnText(Object element, int columnIndex){
Role r = (Role) element;
return r.getTranslatedLabel() != null ? r.getTranslatedLabel() : r.getId();
}
});
checkboxTableViewerRoles.addCheckStateListener((e) -> {
Role r = (Role) e.getElement();
if (r == null)
return;
User user = (User) wvUser.getValue();
user.setAssignedRole(r, e.getChecked());
});
checkboxTableViewerAssociation.setContentProvider(ArrayContentProvider.getInstance());
checkboxTableViewerAssociation.setLabelProvider(new DefaultLabelProvider() {
@Override
public String getColumnText(Object element, int columnIndex){
Mandant m = (Mandant) element;
return m.getName() + " " + m.getVorname();
}
});
m_bindingContext = initDataBindings();
wvUser.addValueChangeListener(new ValueChangedAdapter());
updateUserList();
return container;
}
/**
* Initialize the preference page.
*/
@Override
public void init(IWorkbench workbench){
// Initialize the preference page
}
private void updateUserList(){
List<User> query = new Query<User>(User.class).execute();
query.sort((u1, u2) -> u1.getLabel().compareTo(u2.getLabel()));
tableViewerUsers.setInput(query);
}
private class ValueChangedAdapter implements IValueChangeListener {
@Override
public void handleValueChange(ValueChangeEvent event){
User user = (User) wvUser.getValue();
if (user == null) {
wvAnwender.setValue(null);
return;
}
setErrorMessage(null);
txtPassword.setText("");
txtPassword2.setText("");
txtUsername.setText(user.getUsername());
linkChangePassword.setText(CHANGE_LINK + " (Passwort gesetzt)");
Anwender anw = user.getAssignedContact();
wvAnwender.setValue(anw);
String text = (anw != null) ? anw.getPersonalia() : "Nicht gesetzt";
linkContact.setText(text + " " + CHANGE_LINK);
List<Role> roles = new Query<Role>(Role.class).execute();
checkboxTableViewerRoles.setInput(roles);
Object[] assignedRoles = user.getAssignedRoles().toArray();
checkboxTableViewerRoles.setCheckedElements(assignedRoles);
checkboxTableViewerAssociation.setInput(new Query<Mandant>(Mandant.class).execute());
checkboxTableViewerAssociation.setCheckedElements(new Mandant[] {});
linkRechnungssteller.setText("- " + CHANGE_LINK);
lblRespPhysColor.setBackground(lblRespPhysColorDefColor);
if (anw != null) {
checkboxTableViewerAssociation
.setCheckedElements(anw.getExecutiveDoctorsWorkingFor().toArray());
if (anw.isExecutiveDoctor()) {
Mandant m = Mandant.load(anw.getId());
Color color = UiMandant.getColorForMandator(m);
lblRespPhysColor.setBackground(color);
Rechnungssteller rs = m.getRechnungssteller();
String rst = (rs != null) ? rs.getLabel() : "Nicht gesetzt";
linkRechnungssteller.setText(rst + " " + CHANGE_LINK);
}
}
}
}
private class AnwenderCellLabelProvider extends CellLabelProvider {
@Override
public void update(ViewerCell cell){
User user = (User) cell.getElement();
cell.setText(user.getLabel());
if (user.isAdministrator()) {
cell.setImage(Images.IMG_AUSRUFEZ.getImage());
}
Anwender ac = user.getAssignedContact();
if (ac != null && ac.isExecutiveDoctor()) {
Mandant m = Mandant.load(ac.getId());
Color mc = UiMandant.getColorForMandator(m);
cell.setImage(MandantSelectionContributionItem.getBoxSWTColorImage(mc));
} else {
cell.setImage(Images.IMG_EMPTY_TRANSPARENT.getImage());
}
}
}
protected DataBindingContext initDataBindings(){
DataBindingContext bindingContext = new DataBindingContext();
IObservableValue observeSelectionBtnIsAdminObserveWidget =
WidgetProperties.selection().observe(btnUserIsAdmin);
IObservableValue wvAdminObserveDetailValue =
PojoProperties.value(User.class, "administrator", boolean.class).observeDetail(wvUser);
bindingContext.bindValue(observeSelectionBtnIsAdminObserveWidget, wvAdminObserveDetailValue,
null, null);
IObservableValue observeSelectionBtnIsMandatorObserveWidget =
WidgetProperties.selection().observe(btnIsExecutiveDoctor);
IObservableValue wvMandatorObserveDetailValue = PojoProperties
.value(Anwender.class, "executiveDoctor", boolean.class).observeDetail(wvAnwender);
bindingContext.bindValue(observeSelectionBtnIsMandatorObserveWidget,
wvMandatorObserveDetailValue, null, null);
return bindingContext;
}
}
|
package gov.healthit.chpl.auth.user.dao.impl;
import gov.healthit.chpl.auth.user.User;
import gov.healthit.chpl.auth.user.UserImpl;
import gov.healthit.chpl.auth.user.UserRetrievalException;
import gov.healthit.chpl.auth.user.dao.UserDAO;
import java.util.List;
import javax.persistence.Query;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
@Repository(value="userDAO")
public class UserDAOImpl extends BaseDAOImpl implements UserDAO {
@Transactional
@Override
public void create(UserImpl user) {
entityManager.persist(user);
}
@Transactional
@Override
public void update(UserImpl user) {
entityManager.merge(user);
}
@Transactional
@Override
public void deactivate(String uname) {
Query query = entityManager.createQuery("UPDATE UserImpl SET deleted = true WHERE c.user_id = :uname");
query.setParameter("uname", uname);
query.executeUpdate();
}
@Transactional
@Override
public void deactivate(Long userId){
Query query = entityManager.createQuery("UPDATE UserImpl SET deleted = true WHERE c.user_id = :userid");
query.setParameter("userid", userId);
query.executeUpdate();
}
@Override
public List<UserImpl> findAll() {
//TODO: Where not deleted
List<UserImpl> result = entityManager.createQuery( "from UserImpl ", UserImpl.class ).getResultList();
return result;
}
@Override
public UserImpl getById(Long userId) throws UserRetrievalException {
//TODO: Where not deleted
UserImpl user = null;
Query query = entityManager.createQuery( "from user where user_id = :userid", UserImpl.class );
query.setParameter("userid", userId);
List<UserImpl> result = query.getResultList();
if (result.size() > 1){
throw new UserRetrievalException("Data error. Duplicate user id in database.");
}
if (result.size() > 0){
user = result.get(0);
}
return user;
}
@Override
public UserImpl getByName(String uname) throws UserRetrievalException {
UserImpl user = null;
Query query = entityManager.createQuery( "from UserImpl where user_name = :uname", UserImpl.class );
query.setParameter("uname", uname);
List<UserImpl> result = query.getResultList();
if (result.size() > 1){
throw new UserRetrievalException("Data error. Duplicate user name in database.");
}
if (result.size() > 0){
user = result.get(0);
}
return user;
}
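/*
 * Caller sketch (illustrative; how the DAO bean is obtained/injected is an assumption):
 *
 *   UserDAOImpl dao = ...; // e.g. injected by Spring as "userDAO"
 *   try {
 *       UserImpl user = dao.getByName("admin"); // null if no such user exists
 *       // ...
 *   } catch (UserRetrievalException e) {
 *       // duplicate user names indicate inconsistent data
 *   }
 */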
}
|
package com.github.neunkasulle.chronocommand.control;
import com.github.neunkasulle.chronocommand.model.*;
import com.github.neunkasulle.chronocommand.model.Message;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.mail.*;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import java.io.File;
import java.time.LocalDateTime;
import java.time.Month;
import java.time.temporal.ChronoUnit;
import java.util.LinkedList;
import java.util.List;
import java.time.LocalDate;
import java.util.Properties;
public class TimeSheetControl {
private static final Logger LOGGER = LoggerFactory.getLogger(TimeSheetControl.class);
private static final String NEWTIMESHEET = "New time sheet created";
private static final String LOCKED = "Time sheet is locked";
private static final String MISSCAT = "Missing category";
private static TimeSheetControl ourInstance = new TimeSheetControl();
private TimeSheetControl() {
}
/**
* Gets the one Instance of the TimeSheetControl.
* @return The one TimeSheetControl instance.
*/
public static TimeSheetControl getInstance() {
return ourInstance;
}
/**
* Creates a new TimeRecord, and a new TimeSheet if there is none yet for the current month.
* @param user The user which the time record belongs to
*/
public TimeRecord newTimeRecord(User user) throws ChronoCommandException {
TimeSheetDAO timeSheetDAO = TimeSheetDAO.getInstance();
TimeSheet timeSheet = timeSheetDAO.getTimeSheet(LocalDate.now().getMonth(), LocalDate.now().getYear(), user);
if(timeSheet == null){ //No Time sheet yet, we need to build a new one
timeSheet = new TimeSheet(user, LocalDate.now().getMonth(), LocalDate.now().getYear());
timeSheetDAO.saveTimeSheet(timeSheet);
LOGGER.info(NEWTIMESHEET + " " + LocalDate.now().getMonth() + " " + LocalDate.now().getYear() + " " + user.getUsername());
}
TimeRecord timeRecord = new TimeRecord(LocalDateTime.now(), null, null, null, timeSheet);
if (timeSheet.getState() != TimeSheetState.UNLOCKED) {
LOGGER.error(LOCKED);
throw new ChronoCommandException(Reason.TIMESHEETLOCKED);
}
timeSheetDAO.saveTimeRecord(timeRecord);
LOGGER.info("new time record started for" + user.getUsername());
return timeRecord;
}
/**
* Creates a new TimeRecord, and a new TimeSheet if there is none yet for the current month.
* @param category the category of the work performed in this time.
* @param description description of the work performed.
* @param user The user which the time record belongs to
* @throws ChronoCommandException When there is something wrong with e.g. the category
*/
public TimeRecord newTimeRecord(Category category, String description, User user) throws ChronoCommandException {
TimeSheetDAO timeSheetDAO = TimeSheetDAO.getInstance();
TimeSheet timeSheet = timeSheetDAO.getTimeSheet(LocalDate.now().getMonth(), LocalDate.now().getYear(), user);
if(timeSheet == null) { //No Time sheet yet, we need to build a new one
timeSheet = new TimeSheet(user, LocalDate.now().getMonth(), LocalDate.now().getYear());
timeSheetDAO.saveTimeSheet(timeSheet);
LOGGER.info(NEWTIMESHEET + " " + LocalDate.now().getMonth() + " " + LocalDate.now().getYear() + " " + user.getUsername());
}
if (timeSheet.getState() != TimeSheetState.UNLOCKED) {
LOGGER.error(LOCKED);
throw new ChronoCommandException(Reason.TIMESHEETLOCKED);
}
TimeRecord timeRecord = new TimeRecord(LocalDateTime.now(), null, category, description, timeSheet);
timeSheetDAO.saveTimeRecord(timeRecord);
LOGGER.info("new time record started for" + user.getUsername());
return timeRecord;
}
/**
* Closes a time record. Cannot be invoked if newTimeRecord was called without category and description.
* @param user The user which the time record belongs to
* @throws ChronoCommandException When there is something wrong with e.g. the category
*/
public TimeRecord closeTimeRecord(User user) throws ChronoCommandException {
TimeSheetDAO timeSheetDAO = TimeSheetDAO.getInstance();
TimeRecord timeRecord = timeSheetDAO.getLatestTimeRecord(user);
if (timeRecord.getTimeSheet().getState() != TimeSheetState.UNLOCKED) {
LOGGER.error(LOCKED);
throw new ChronoCommandException(Reason.TIMESHEETLOCKED);
}
if(timeRecord.getCategory() == null) {
LOGGER.error(MISSCAT);
throw new ChronoCommandException(Reason.MISSINGCATEGORY);
}
timeRecord.setEnd(LocalDateTime.now());
timeSheetDAO.saveTimeRecord(timeRecord);
LOGGER.info("Time record closed for" + user.getUsername());
return timeRecord;
}
/**
* Closes a time record. Cannot be invoked if newTimeRecord was called with category and description.
* @param category the category of the work performed in this time.
* @param description description of the work performed.
* @param user The user which the time record belongs to
* @throws ChronoCommandException When there is something wrong with e.g. the category
*/
public TimeRecord closeTimeRecord(Category category, String description, User user) throws ChronoCommandException {
TimeSheetDAO timeSheetDAO = TimeSheetDAO.getInstance();
TimeRecord timeRecord = timeSheetDAO.getLatestTimeRecord(user);
if (timeRecord.getTimeSheet().getState() != TimeSheetState.UNLOCKED) {
LOGGER.error(LOCKED);
throw new ChronoCommandException(Reason.TIMESHEETLOCKED);
}
if (category == null && timeRecord.getCategory() == null) {
LOGGER.error(MISSCAT);
throw new ChronoCommandException(Reason.MISSINGCATEGORY);
}
timeRecord.setCategory(category);
timeRecord.setDescription(description);
timeRecord.setEnd(LocalDateTime.now());
TimeSheetDAO.getInstance().saveTimeRecord(timeRecord);
LOGGER.info("Time record closed for" + user.getUsername());
return timeRecord;
}
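/*
 * Usage sketch for the start/stop workflow (illustrative; someCategory and someUser
 * are assumed to exist already):
 *
 *   TimeSheetControl control = TimeSheetControl.getInstance();
 *   try {
 *       control.newTimeRecord(someCategory, "code review", someUser); // opens a record
 *       // ... work happens ...
 *       control.closeTimeRecord(someUser);                            // stamps the end time
 *   } catch (ChronoCommandException e) {
 *       // e.g. Reason.TIMESHEETLOCKED or Reason.MISSINGCATEGORY
 *   }
 */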
public TimeRecord getLatestTimeRecord(User user) {
return TimeSheetDAO.getInstance().getLatestTimeRecord(user);
}
/**
* Adds a whole time record to the timeSheet
* @param beginn Start of work
* @param end end of work
* @param category the category of the work performed in this time.
* @param description description of the work performed.
* @param user The user which the time record belongs to
* @throws ChronoCommandException When there is something wrong with e.g. the category
*/
public void addTimeToSheet(LocalDateTime beginn, LocalDateTime end, Category category, String description, User user)
throws ChronoCommandException {
TimeSheetDAO timeSheetDAO = TimeSheetDAO.getInstance();
TimeSheet timeSheet = timeSheetDAO.getTimeSheet(beginn.getMonth(), beginn.getYear(), user);
if(category == null) {
LOGGER.error(MISSCAT);
throw new ChronoCommandException(Reason.MISSINGCATEGORY);
}
if (timeSheet == null) { //No Time sheet yet, we need to build a new one
timeSheet = new TimeSheet(user, beginn.getMonth(), beginn.getYear());
timeSheetDAO.saveTimeSheet(timeSheet);
LOGGER.info(NEWTIMESHEET + " " + LocalDate.now().getMonth() + " " + LocalDate.now().getYear() + " " + user.getUsername());
}
if (timeSheet.getState() != TimeSheetState.UNLOCKED) {
LOGGER.error(LOCKED);
throw new ChronoCommandException(Reason.TIMESHEETLOCKED);
}
timeSheetDAO.saveTimeRecord(new TimeRecord(beginn, end, category, description, timeSheet));
LOGGER.info("Time record created for" + user.getUsername());
}
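/*
 * Sketch for back-filling a completed block of work (illustrative; someCategory and
 * someUser are assumed to exist already):
 *
 *   control.addTimeToSheet(
 *       LocalDateTime.of(2016, 1, 12, 9, 0),
 *       LocalDateTime.of(2016, 1, 12, 11, 30),
 *       someCategory, "weekly meeting", someUser);
 */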
/**
* Collects all time sheets from users who are supervised by a specific user
* @param month the month from which the time sheets will be collected
* @param year the year from which the time sheets will be collected
* @param user the supervising user
* @return A list of time sheets
*/
/*
public List<TimeSheet> getSupervisedTimeSheets(Month month, int year, User user) {
//not needed for now
return null;
}
*/
/**
* A timeSheet will be locked against changes
* @param timeSheet the timesheet which will be locked
*/
public void lockTimeSheet(TimeSheet timeSheet, User user) {
timeSheet.setTimeSheetState(TimeSheetState.LOCKED);
LOGGER.info("Locked:" + timeSheet.getMonth() + user.getUsername());
}
/**
* A TimeSheet will be unlocked again and thus can be changed again
* @param timeSheet the time sheet which will be unlocked
*/
public void unlockTimeSheet(TimeSheet timeSheet, User user) {
timeSheet.setTimeSheetState(TimeSheetState.UNLOCKED);
LOGGER.info("unlocked:" + timeSheet.getMonth() + user.getUsername());
}
/**
* A time sheet will be marked as checked
* @param timeSheet the time sheet which will be marked
*/
public void approveTimeSheet(TimeSheet timeSheet, User user) {
LOGGER.info("checked:" + timeSheet.getMonth() + user.getUsername());
timeSheet.setTimeSheetState(TimeSheetState.CHECKED);
}
/**
* Prints out all time sheets which are checked
* @param month the month of the time sheets
* @param year the year of the time sheets
* @return a PDF file which can be printed
*/
public File printCheckedTimeSheets(Month month, int year) {
TimeSheetDAO timeSheetDAO = TimeSheetDAO.getInstance();
TimeSheetHandler timeSheetHandler = TimeSheetHandler.getInstance();
List<TimeSheet> unfilteredTimeSheets = timeSheetDAO.getAllTimeSheets(month, year);
List<TimeSheet> filteredTimeSheets = new LinkedList<>();
for(TimeSheet timeSheet: unfilteredTimeSheets) {
if(timeSheet.getState() == TimeSheetState.CHECKED){
filteredTimeSheets.add(timeSheet);
}
}
return timeSheetHandler.createPdfFromAllTimeSheets(filteredTimeSheets);
}
/**
* Prints out all time sheets
* @param month the month of the time sheets
* @param year the year of the time sheets
* @return a PDF file which can be printed
*/
public File printAllTimeSheets(Month month, int year) {
TimeSheetDAO timeSheetDAO = TimeSheetDAO.getInstance();
TimeSheetHandler timeSheetHandler = TimeSheetHandler.getInstance();
List<TimeSheet> timeSheets = timeSheetDAO.getAllTimeSheets(month, year);
return timeSheetHandler.createPdfFromAllTimeSheets(timeSheets);
}
/**
* Prints out a specific time sheet
* @param timeSheet the time sheet which should be printed
* @return a PDF file which can be printed
*/
public File printTimeSheet(TimeSheet timeSheet) {
TimeSheetHandler timeSheetHandler = TimeSheetHandler.getInstance();
return timeSheetHandler.createPdfFromTimeSheet(timeSheet);
}
/**
* Prints all time sheets from a specific user
* @param user the user from which the time sheets are printed
* @return a PDF file which can be printed
*/
public File printAllTimeSheets(User user) {
TimeSheetDAO timeSheetDAO = TimeSheetDAO.getInstance();
TimeSheetHandler timeSheetHandler = TimeSheetHandler.getInstance();
List<TimeSheet> timeSheets = timeSheetDAO.getTimeSheetsFromUser(user);
return timeSheetHandler.createPdfFromAllTimeSheets(timeSheets);
}
/**
* Gets all categories available for a time record
* @return a list of categories
*/
public List<Category> getAllCategories() {
return CategoryDAO.getInstance().getAllCategories();
}
/**
* get all time sheets from a time frame
* @param month the month of the time sheets
* @param year the year of the time sheets
* @return A list of time sheet in the specified time frame
*/
public List<TimeSheet> getTimeSheet(Month month, int year) {
return TimeSheetDAO.getInstance().getAllTimeSheets(month, year);
}
/**
* Get all time sheets from a user
* @param user The user owning the time sheets
* @return List of all time sheets
*/
public List<TimeSheet> getTimeSheetsFromUser(User user) {
return TimeSheetDAO.getInstance().getTimeSheetsFromUser(user);
}
/**
* Gets the current sum of working time in minutes
* @param timeRecords the time records to sum up
* @return the number of minutes worked
*/
private int getCurrentMinutes(TimeRecord[] timeRecords) {
int currentMinutes = 0;
for (TimeRecord timeRecord : timeRecords) {
currentMinutes += ChronoUnit.MINUTES.between(timeRecord.getBeginning(), timeRecord.getEnd());
}
return currentMinutes;
}
public void editTimeRecord(TimeRecord timeRecord) throws ChronoCommandException {
if (timeRecord.getTimeSheet().getState() != TimeSheetState.UNLOCKED) {
LOGGER.error("Time sheet is locked");
throw new ChronoCommandException(Reason.TIMESHEETLOCKED);
}
// TODO check for valid data
if (!LoginControl.getInstance().getCurrentUser().getId().equals(timeRecord.getTimeSheet().getUser().getId())) {
LOGGER.error("not permitted to perform action: editTimeRecord caused by" + LoginControl.getInstance().getCurrentUser().getUsername());
throw new ChronoCommandException(Reason.NOTPERMITTED);
}
TimeSheetDAO.getInstance().saveTimeRecord(timeRecord);
}
public void addMessageToTimeSheet(TimeSheet timeSheet, Message message) {
timeSheet.setMessage(message);
}
public List<Message> getMessagesFromTimeSheet(TimeSheet timeSheet) {
return timeSheet.getMessages();
}
public void sendEmail(User recipient, String message) {
String host = "localhost";
Properties properties = System.getProperties();
properties.setProperty("mail.smtp.host", host);
Session session = Session.getDefaultInstance(properties, null);
try {
javax.mail.Message msg = new MimeMessage(session);
msg.setFrom(new InternetAddress("reminder@chronocommand.eu", "Example.com Admin"));
msg.addRecipient(javax.mail.Message.RecipientType.TO,
new InternetAddress(recipient.getEmail(), recipient.getRealname()));
msg.setSubject("ChronoCommand Reminder");
msg.setText(message);
Transport.send(msg);
} catch (AddressException e) {
LOGGER.error("Adress Exception", e);
} catch (MessagingException e) {
LOGGER.error("Messaging Exception", e);
}catch (java.io.UnsupportedEncodingException e) {
LOGGER.error("Unsupported encoding", e);
}
}
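/*
 * Sketch (illustrative): sending a reminder, assuming an SMTP server listens on localhost.
 *
 *   control.sendEmail(someUser, "Please submit your time sheet for January.");
 */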
}
|
package com.github.sormuras.bach.internal;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import com.github.sormuras.bach.Bach;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.nio.charset.StandardCharsets;
import java.util.Optional;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
class GitHubTests {
@Test
void simplicissimus() {
var baos = new ByteArrayOutputStream();
var log = new PrintStream(baos, true, StandardCharsets.UTF_8);
var bach = new Bach(log, Bach::newHttpClient);
var hub = new GitHub(bach, "sormuras", "simplicissimus");
assertTrue(hub.findLatestCommitHash().isPresent());
var module = "com.github.sormuras.simplicissimus";
assertEquals(Optional.empty(), hub.findReleasedModule(module, "0"));
assertEquals(Optional.empty(), hub.findReleasedModule(module, "1"));
assertEquals(Optional.empty(), hub.findReleasedModule(module, "1.3.1"));
assertEquals(
"https://github.com/sormuras/simplicissimus/releases/download/1.4/com.github.sormuras.simplicissimus@1.4.jar",
hub.findReleasedModule(module, "1.4").orElseThrow());
assertEquals(
"https://github.com/sormuras/simplicissimus/releases/download/1.4.6/com.github.sormuras.simplicissimus@1.4.6.jar",
hub.findReleasedModule(module, "1.4.6").orElseThrow());
assertEquals(
"https://github.com/sormuras/simplicissimus/releases/download/1.5/com.github.sormuras.simplicissimus@1.5.jar",
hub.findReleasedModule(module, "1.5").orElseThrow());
var latest = hub.findLatestReleaseTag().orElseThrow();
assertEquals(
"https://github.com/sormuras/simplicissimus/releases/download/"
+ latest
+ "/com.github.sormuras.simplicissimus@"
+ latest
+ ".jar",
hub.findReleasedModule(module, latest).orElseThrow());
}
@ParameterizedTest
@ValueSource(strings = {"early-access", "1-ea+2", "1-ea+1", "0"})
void sawdust(String version) {
var baos = new ByteArrayOutputStream();
var log = new PrintStream(baos, true, StandardCharsets.UTF_8);
var bach = new Bach(log, Bach::newHttpClient);
var hub = new GitHub(bach, "sormuras", "sawdust");
assertTrue(hub.findReleasedModule("com.github.sormuras.sawdust", version).isPresent());
assertTrue(hub.findReleasedModule("com.github.sormuras.sawdust.api", version).isPresent());
assertTrue(hub.findReleasedModule("com.github.sormuras.sawdust.core", version).isPresent());
}
}
|
package org.hanuna.gitalk.common.compressedlist;
import org.hanuna.gitalk.common.CacheGet;
import org.hanuna.gitalk.common.Get;
import org.hanuna.gitalk.common.compressedlist.generator.Generator;
import org.jetbrains.annotations.NotNull;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.List;
/**
* @author erokhins
* position is a position in the CompressedList
* index is the index of a PositionItem in positionItems
*/
public class RuntimeGenerateCompressedList<T> implements CompressedList<T> {
private final CacheGet<Integer, T> cache = new CacheGet<Integer, T>(new Get<Integer, T>() {
@NotNull
@Override
public T get(@NotNull Integer key) {
return RuntimeGenerateCompressedList.this.get(key);
}
});
private final Generator<T> generator;
private final int intervalSave;
private final List<PositionItem> positionItems = new ArrayList<PositionItem>();
private int size;
public RuntimeGenerateCompressedList(Generator<T> generator, int size, int intervalSave) {
this.generator = generator;
this.intervalSave = intervalSave;
this.size = size;
T firstT = generator.generateFirst();
positionItems.add(new PositionItem(0, firstT));
int curPosition = intervalSave;
T prevT = firstT;
while (curPosition < size) {
prevT = generator.generate(prevT, intervalSave);
positionItems.add(new PositionItem(curPosition, prevT));
curPosition = curPosition + intervalSave;
}
}
public RuntimeGenerateCompressedList(Generator<T> generator, int size) {
this(generator, size, 20);
}
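/*
 * Usage sketch (illustrative): a generator whose element at position p is simply p, so
 * generate(prev, steps) advances by adding steps. Only the two Generator methods used in
 * this class are implemented here; the real interface may declare more.
 *
 *   Generator<Integer> counter = new Generator<Integer>() {
 *       @Override
 *       public Integer generateFirst() { return 0; }
 *       @Override
 *       public Integer generate(Integer prev, int steps) { return prev + steps; }
 *   };
 *   CompressedList<Integer> list = new RuntimeGenerateCompressedList<Integer>(counter, 100, 20);
 *   list.getList().get(57); // regenerated from the saved item at position 40 in 17 steps
 */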
// returns the largest index k such that positionItems.get(k).getPosition() <= position
private int binarySearch(int position) {
assert positionItems.size() > 0;
int x = 0;
int y = positionItems.size() - 1;
while (y - x > 1) {
int z = (x + y) / 2;
if (positionItems.get(z).getPosition() <= position) {
x = z;
} else {
y = z;
}
}
if (positionItems.get(y).getPosition() <= position) {
return y;
}
return x;
}
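// Worked example: with saved positions {0, 20, 40}, binarySearch(25) returns 1 (the item
// at position 20), binarySearch(40) returns 2, and binarySearch(0) returns 0.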
@NotNull
@Override
public List<T> getList() {
return new AbstractList<T>() {
@Override
public T get(int index) {
return cache.get(index);
}
@Override
public int size() {
return size;
}
};
}
private void fixPositionsTail(int startIndex, int deltaSize) {
for (int i = startIndex; i < positionItems.size(); i++) {
PositionItem positionItem = positionItems.get(i);
positionItem.setPosition(positionItem.getPosition() + deltaSize);
}
}
private List<PositionItem> regenerateMediate(PositionItem prevSavePositionItem, int downSavePosition) {
List<PositionItem> mediateSave = new ArrayList<PositionItem>();
T prevT = prevSavePositionItem.getT();
int curTPosition = prevSavePositionItem.getPosition() + intervalSave;
while (curTPosition < downSavePosition - intervalSave) {
prevT = generator.generate(prevT, intervalSave);
mediateSave.add(new PositionItem(curTPosition, prevT));
curTPosition = curTPosition + intervalSave;
}
return mediateSave;
}
private void checkReplace(Replace replace) {
if (replace.to() >= size) {
throw new IllegalArgumentException("size= "+size + "Bad replace: " + replace);
}
}
@Override
public void recalculate(@NotNull Replace replace) {
if (replace == Replace.ID_REPLACE) {
return;
}
checkReplace(replace);
cache.clear();
int deltaSize = replace.addedElementCount() - replace.removedElementCount();
int upSaveIndex = binarySearch(replace.from());
if (upSaveIndex > 0) { // update started from replace.from()
// step back to the previous saved item: the item found may lie inside the replaced range
upSaveIndex--;
} else {
positionItems.set(0, new PositionItem(0, generator.generateFirst()));
}
PositionItem upSavePositionItem = positionItems.get(upSaveIndex);
int downSaveIndex = upSaveIndex;
while (downSaveIndex < positionItems.size() && positionItems.get(downSaveIndex).getPosition() <= replace.to()) {
downSaveIndex++;
}
size = size + deltaSize;
fixPositionsTail(downSaveIndex, deltaSize);
int downSavePosition = size;
if (downSaveIndex < positionItems.size()) {
downSavePosition = positionItems.get(downSaveIndex).getPosition();
}
List<PositionItem> mediate = regenerateMediate(upSavePositionItem, downSavePosition);
positionItems.subList(upSaveIndex + 1, downSaveIndex).clear();
positionItems.addAll(upSaveIndex + 1, mediate);
}
@NotNull
private T get(int position) {
if (position < 0 || position >= size) {
throw new IllegalArgumentException();
}
int saveIndex = binarySearch(position);
final PositionItem positionItem = positionItems.get(saveIndex);
assert position >= positionItem.getPosition();
return generator.generate(positionItem.getT(), position - positionItem.getPosition());
}
private class PositionItem {
private int position;
private final T t;
private PositionItem(int position, @NotNull T t) {
this.position = position;
this.t = t;
}
public int getPosition() {
return position;
}
@NotNull
public T getT() {
return t;
}
public void setPosition(int position) {
this.position = position;
}
}
}
|
package integratedtoolkit.util;
import integratedtoolkit.log.Loggers;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class TraceMerger {
protected static final Logger logger = LogManager.getLogger(Loggers.TRACING);
protected static final boolean debug = logger.isDebugEnabled();
// Info used for matching sync events
private static final Integer SYNC_TYPE = 8000666;
private String syncRegex = "(^\\d+:\\d+:\\d+):(\\d+):(\\d+):(\\d+).*:" + SYNC_TYPE + ":(\\d+)";
private Pattern syncPattern = Pattern.compile(syncRegex);
// Selectors for replace Pattern
private static final Integer R_ID_INDEX = 1;
private static final Integer TIMESTAMP_INDEX = 4;
private static final Integer WORKER_ID_INDEX = 2; // note: this regex may be wrong (it was designed for matching tasks, not workers)
private String workerThreadInfo = "(^\\d+):(\\d+):(\\d+):(\\d+):(\\d+):(\\d+):(.*)";
private Pattern workerThreadInfoPattern = Pattern.compile(workerThreadInfo);
private static final Integer WORKER_THREAD_ID = 2;
private static final Integer WORKER_TIMESTAMP = 6;
private static final Integer WORKER_LINE_INFO = 7;
private static final String masterTraceSuffix = "_compss_trace_";
private static final String traceExtension = ".prv";
private static final String workerTraceSuffix = "_python_trace" + traceExtension;
private static final String traceSubDir = "trace";
private static final String workerSubDir = "python";
private static String workingDir;
private FileWriter fw;
private BufferedWriter bw;
private PrintWriter masterWriter;
private File masterTrace;
private File[] workersTraces;
private String masterTracePath;
private String[] workersTracePath;
private class LineInfo {
private final String resourceId;
private final Long timestamp;
public LineInfo(String resourceID, Long timestamp) {
this.resourceId = resourceID;
this.timestamp = timestamp;
}
public String getResourceId() {
return resourceId;
}
public Long getTimestamp() {
return timestamp;
}
}
public TraceMerger(String workingDir, String appName) throws IOException {
initMasterTraceInfo(workingDir, appName);
initWorkersTracesInfo(workingDir);
fw = new FileWriter(masterTracePath, true);
bw = new BufferedWriter(fw);
masterWriter = new PrintWriter(bw);
logger.debug("Trace's merger initialization successful");
}
private void initMasterTraceInfo(String workingDir, String appName) throws FileNotFoundException {
final String traceNamePrefix = appName + masterTraceSuffix;
File f = new File(workingDir + File.separator + traceSubDir);
File[] matchingFiles = f.listFiles((File dir, String name) -> name.startsWith(traceNamePrefix) && name.endsWith(traceExtension));
if (matchingFiles != null && matchingFiles.length > 0) {
masterTrace = matchingFiles[0];
masterTracePath = masterTrace.getAbsolutePath();
if (matchingFiles.length > 1) {
logger.warn("Found more than one master trace, using " + masterTrace + " to merge.");
}
} else {
throw new FileNotFoundException("Master trace " + traceNamePrefix + "*" + traceExtension + " not found.");
}
}
private void initWorkersTracesInfo(String workingDir) throws FileNotFoundException {
TraceMerger.workingDir = workingDir;
File f = new File(workingDir + File.separator + traceSubDir + File.separator + workerSubDir);
File[] matchingFiles = f.listFiles((File dir, String name) -> name.endsWith(workerTraceSuffix));
if (matchingFiles == null) {
throw new FileNotFoundException("No workers traces to merge found.");
} else {
workersTraces = matchingFiles;
}
workersTracePath = new String[workersTraces.length];
for (int i = 0; i < workersTracePath.length; ++i) {
workersTracePath[i] = workersTraces[i].getAbsolutePath();
}
}
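// Merge flow: collect sync events from the master trace, then for each worker trace
// compute its offset relative to the master clock (see getWorkerInfo), rewrite every
// worker event with the master resource id and the adjusted timestamp, and append the
// result to the master trace file.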
public void merge() throws IOException {
logger.debug("Parsing master sync events");
HashMap<Integer, List<LineInfo> > masterSyncEvents = getSyncEvents(masterTracePath, -1);
logger.debug("Proceeding to merge task traces into master");
for (File workerFile : workersTraces) {
String workerFileName = workerFile.getName();
String wID = "";
for (int i = 0; workerFileName.charAt(i) != '_'; ++i){
wID += workerFileName.charAt(i);
}
Integer workerID = Integer.parseInt(wID);
workerID++; // first worker is resource number 2
List<String> cleanLines = getWorkerEvents(workerFile);
HashMap<Integer, List<LineInfo> > workerSyncEvents = getSyncEvents(workerFile.getPath(), workerID);
writeWorkerEvents(masterSyncEvents, workerSyncEvents, cleanLines, workerID);
if (!debug) {
removeFolder(workingDir + File.separator + traceSubDir + File.separator + workerSubDir);
}
}
masterWriter.close();
logger.debug("Merging finished.");
}
private void add(HashMap<Integer, List<LineInfo> > map, Integer key, LineInfo newValue) {
List<LineInfo> currentValue = map.get(key);
if (currentValue == null) {
currentValue = new ArrayList<>();
map.put(key, currentValue);
}
currentValue.add(newValue);
}
private HashMap<Integer, List<LineInfo>> getSyncEvents(String tracePath, Integer workerID) throws IOException {
FileInputStream inputStream = null;
Scanner sc = null;
HashMap<Integer, List<LineInfo> > idToSyncInfo = new HashMap<>();
try {
inputStream = new FileInputStream(tracePath);
sc = new Scanner(inputStream, "UTF-8");
while (sc.hasNextLine()) {
String line = sc.nextLine();
Matcher m = syncPattern.matcher(line);
if (m.find()) {
Integer wID = (workerID == -1) ? Integer.parseInt(m.group(WORKER_ID_INDEX)) : workerID;
String resourceID = m.group(R_ID_INDEX);
Long timestamp = Long.parseLong(m.group(TIMESTAMP_INDEX));
add(idToSyncInfo, wID, new LineInfo(resourceID, timestamp));
}
}
// note that Scanner suppresses exceptions
if (sc.ioException() != null) {
throw sc.ioException();
}
} finally {
if (inputStream != null) {
inputStream.close();
}
if (sc != null) {
sc.close();
}
}
return idToSyncInfo;
}
private List<String> getWorkerEvents(File worker) throws IOException {
List<String> lines = Files.readAllLines(Paths.get(worker.getAbsolutePath()), StandardCharsets.UTF_8);
int startIndex = 1; // Remove header
int endIndex = lines.size() - 1;
return lines.subList(startIndex, endIndex);
}
private void writeWorkerEvents(HashMap<Integer, List<LineInfo> > masterSyncEvents,
HashMap<Integer, List<LineInfo> > workerSyncEvents,
List<String> eventsLine, Integer workerID) {
LineInfo workerHeader = getWorkerInfo(masterSyncEvents.get(workerID), workerSyncEvents.get(workerID));
for (String line : eventsLine) {
String newEvent = updateEvent(workerHeader, line, workerID);
masterWriter.println(newEvent);
}
}
private String updateEvent(LineInfo workerHeader, String line, Integer workerID) {
Matcher taskMatcher = workerThreadInfoPattern.matcher(line);
String newLine = "";
if (taskMatcher.find()) {
String baseWorkerHeader = workerHeader.getResourceId();
Integer threadID = Integer.parseInt(taskMatcher.group(WORKER_THREAD_ID));
String eventHeader = baseWorkerHeader + ":" + workerID + ":" + threadID; // WRONG
Long timestamp = workerHeader.getTimestamp() + Long.parseLong(taskMatcher.group(WORKER_TIMESTAMP));
String lineInfo = taskMatcher.group(WORKER_LINE_INFO);
newLine = eventHeader + ":" + timestamp + ":" + lineInfo;
}
return newLine;
}
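// Derives the base LineInfo for a worker: half of the difference between the master-side
// and worker-side sync intervals is added to the master's first sync timestamp, and the
// master's resource id is reused for all of that worker's events.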
private LineInfo getWorkerInfo(List<LineInfo> masterSyncEvents, List<LineInfo> workerSyncEvents) {
LineInfo javaStart = masterSyncEvents.get(0);
LineInfo javaEnd = masterSyncEvents.get(1);
LineInfo workerStart = workerSyncEvents.get(0);
LineInfo workerEnd = workerSyncEvents.get(1);
Long javaTime = Math.abs(javaStart.getTimestamp() - javaEnd.getTimestamp());
Long workerTime = Math.abs(workerStart.getTimestamp() - workerEnd.getTimestamp());
Long overhead = (javaTime - workerTime) / 2;
return new LineInfo(javaStart.resourceId, javaStart.getTimestamp() + overhead);
}
private void removeFolder(String folderPath) throws IOException {
File folder = new File(folderPath);
remove(folder);
}
private void remove(File f) throws IOException {
if (f.exists()) {
if (f.isDirectory()) {
for (File child : f.listFiles()) {
remove(child);
}
}
Files.delete(f.toPath());
}
}
}
|
/**
* Autogenerated by Thrift Compiler (0.9.3)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
*
* @generated
*/
package com.cinchapi.concourse.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.util.Map.Entry;
import javax.annotation.Generated;
import com.cinchapi.concourse.util.Convert;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
@SuppressWarnings({ "cast", "rawtypes", "serial", "unchecked" })
/**
* A recursive structure that encodes one or more {@link TObject TObjects}.
*
* <p>
* The most basic {@link ComplexTObject} is a
* {@link ComplexTObjectType#SCALAR scalar}, which is just a wrapped
* {@link TObject}. Beyond that, complex collections can be represented as a
* {@link Set}, {@link List} or {@link Map} of
* {@link ComplexTObject ComplexTObjects}.
* </p>
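* <p>
* Illustrative usage (not part of the generated code):
* {@code fromJavaObject(Lists.newArrayList("a", "b"))} yields a
* {@link ComplexTObjectType#LIST LIST}-typed instance whose original value can be
* recovered with {@link #getJavaObject()}.
* </p>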
*/
@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)", date = "2016-06-13")
public class ComplexTObject implements
org.apache.thrift.TBase<ComplexTObject, ComplexTObject._Fields>,
java.io.Serializable,
Cloneable,
Comparable<ComplexTObject> {
/**
* Create a new {@link ComplexTObject} from the specified java
* {@code object}.
* The original object can be retrieved using the {@link #getJavaObject()}
* method.
*
* @param object the object to wrap within the {@link ComplexTObject}.
* @return the ComplexTObject
*/
public static <T> ComplexTObject fromJavaObject(T object) {
ComplexTObject complex = new ComplexTObject();
if(object instanceof Map) {
Map<?, ?> map = (Map<?, ?>) object;
complex.setType(ComplexTObjectType.MAP);
Map<ComplexTObject, ComplexTObject> tmap = Maps.newLinkedHashMap();
for (Entry<?, ?> entry : map.entrySet()) {
tmap.put(fromJavaObject(entry.getKey()),
fromJavaObject(entry.getValue()));
}
complex.setTmap(tmap);
}
else if(object instanceof List) {
List<?> list = (List<?>) object;
complex.setType(ComplexTObjectType.LIST);
List<ComplexTObject> tlist = Lists.newArrayList();
for (Object elt : list) {
tlist.add(fromJavaObject(elt));
}
complex.setTlist(tlist);
}
else if(object instanceof Set) {
Set<?> set = (Set<?>) object;
complex.setType(ComplexTObjectType.SET);
Set<ComplexTObject> tset = Sets.newLinkedHashSet();
for (Object elt : set) {
tset.add(fromJavaObject(elt));
}
complex.setTset(tset);
}
else if(object instanceof TObject) {
complex.setType(ComplexTObjectType.TOBJECT);
complex.setTobject((TObject) object);
}
else if(object instanceof TCriteria) {
complex.setType(ComplexTObjectType.TCRITERIA);
complex.setTcriteria((TCriteria) object);
}
else {
complex.setType(ComplexTObjectType.SCALAR);
complex.setTscalar(Convert.javaToThrift(object));
}
return complex;
}
public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
// isset id assignments
@SuppressWarnings("unused")
private static final _Fields optionals[] = { _Fields.TSCALAR, _Fields.TMAP,
_Fields.TLIST, _Fields.TSET };
private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct(
"ComplexTObject");
private static final org.apache.thrift.protocol.TField TLIST_FIELD_DESC = new org.apache.thrift.protocol.TField(
"tlist", org.apache.thrift.protocol.TType.LIST, (short) 4);
private static final org.apache.thrift.protocol.TField TMAP_FIELD_DESC = new org.apache.thrift.protocol.TField(
"tmap", org.apache.thrift.protocol.TType.MAP, (short) 3);
private static final org.apache.thrift.protocol.TField TSCALAR_FIELD_DESC = new org.apache.thrift.protocol.TField(
"tscalar", org.apache.thrift.protocol.TType.STRUCT, (short) 2);
private static final org.apache.thrift.protocol.TField TSET_FIELD_DESC = new org.apache.thrift.protocol.TField(
"tset", org.apache.thrift.protocol.TType.SET, (short) 5);
private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField(
"type", org.apache.thrift.protocol.TType.I32, (short) 1);
static {
schemes.put(StandardScheme.class,
new ComplexTObjectStandardSchemeFactory());
schemes.put(TupleScheme.class, new ComplexTObjectTupleSchemeFactory());
}
static {
Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(
_Fields.class);
tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData(
"type", org.apache.thrift.TFieldRequirementType.REQUIRED,
new org.apache.thrift.meta_data.EnumMetaData(
org.apache.thrift.protocol.TType.ENUM,
ComplexTObjectType.class)));
tmpMap.put(_Fields.TSCALAR,
new org.apache.thrift.meta_data.FieldMetaData("tscalar",
org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.StructMetaData(
org.apache.thrift.protocol.TType.STRUCT,
com.cinchapi.concourse.thrift.TObject.class)));
tmpMap.put(_Fields.TMAP, new org.apache.thrift.meta_data.FieldMetaData(
"tmap", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.MapMetaData(
org.apache.thrift.protocol.TType.MAP,
new org.apache.thrift.meta_data.FieldValueMetaData(
org.apache.thrift.protocol.TType.STRUCT,
"ComplexTObject"),
new org.apache.thrift.meta_data.FieldValueMetaData(
org.apache.thrift.protocol.TType.STRUCT,
"ComplexTObject"))));
tmpMap.put(
_Fields.TLIST,
new org.apache.thrift.meta_data.FieldMetaData(
"tlist",
org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.ListMetaData(
org.apache.thrift.protocol.TType.LIST,
new org.apache.thrift.meta_data.FieldValueMetaData(
org.apache.thrift.protocol.TType.STRUCT,
"ComplexTObject"))));
tmpMap.put(_Fields.TSET, new org.apache.thrift.meta_data.FieldMetaData(
"tset", org.apache.thrift.TFieldRequirementType.OPTIONAL,
new org.apache.thrift.meta_data.SetMetaData(
org.apache.thrift.protocol.TType.SET,
new org.apache.thrift.meta_data.FieldValueMetaData(
org.apache.thrift.protocol.TType.STRUCT,
"ComplexTObject"))));
metaDataMap = Collections.unmodifiableMap(tmpMap);
org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(
ComplexTObject.class, metaDataMap);
}
public List<ComplexTObject> tlist; // optional
public Map<ComplexTObject, ComplexTObject> tmap; // optional
public com.cinchapi.concourse.thrift.TObject tscalar; // optional
public Set<ComplexTObject> tset; // optional
public TObject tobject; // optional
public TCriteria tcriteria; // optional
/**
*
* @see ComplexTObjectType
*/
public ComplexTObjectType type; // required
public ComplexTObject() {}
/**
* Transient variable to store computed object
* */
private transient Object cachedObject;
/**
* Performs a deep copy on <i>other</i>.
*/
public ComplexTObject(ComplexTObject other) {
if(other.isSetType()) {
this.type = other.type;
}
if(other.isSetTscalar()) {
this.tscalar = new com.cinchapi.concourse.thrift.TObject(
other.tscalar);
}
if(other.isSetTmap()) {
Map<ComplexTObject, ComplexTObject> __this__tmap = new HashMap<ComplexTObject, ComplexTObject>(
other.tmap.size());
for (Map.Entry<ComplexTObject, ComplexTObject> other_element : other.tmap
.entrySet()) {
ComplexTObject other_element_key = other_element.getKey();
ComplexTObject other_element_value = other_element.getValue();
ComplexTObject __this__tmap_copy_key = other_element_key;
ComplexTObject __this__tmap_copy_value = other_element_value;
__this__tmap
.put(__this__tmap_copy_key, __this__tmap_copy_value);
}
this.tmap = __this__tmap;
}
if(other.isSetTlist()) {
List<ComplexTObject> __this__tlist = new ArrayList<ComplexTObject>(
other.tlist.size());
for (ComplexTObject other_element : other.tlist) {
__this__tlist.add(other_element);
}
this.tlist = __this__tlist;
}
if(other.isSetTset()) {
Set<ComplexTObject> __this__tset = new HashSet<ComplexTObject>(
other.tset.size());
for (ComplexTObject other_element : other.tset) {
__this__tset.add(other_element);
}
this.tset = __this__tset;
}
}
public ComplexTObject(ComplexTObjectType type) {
this();
this.type = type;
}
public void addToTlist(ComplexTObject elem) {
if(this.tlist == null) {
this.tlist = new ArrayList<ComplexTObject>();
}
this.tlist.add(elem);
}
public void addToTset(ComplexTObject elem) {
if(this.tset == null) {
this.tset = new HashSet<ComplexTObject>();
}
this.tset.add(elem);
}
@Override
public void clear() {
this.type = null;
this.tscalar = null;
this.tmap = null;
this.tlist = null;
this.tset = null;
}
@Override
public int compareTo(ComplexTObject other) {
if(!getClass().equals(other.getClass())) {
return getClass().getName().compareTo(other.getClass().getName());
}
int lastComparison = 0;
lastComparison = Boolean.valueOf(isSetType()).compareTo(
other.isSetType());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetType()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type,
other.type);
if(lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetTscalar()).compareTo(
other.isSetTscalar());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetTscalar()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(
this.tscalar, other.tscalar);
if(lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetTmap()).compareTo(
other.isSetTmap());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetTmap()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tmap,
other.tmap);
if(lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetTlist()).compareTo(
other.isSetTlist());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetTlist()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(
this.tlist, other.tlist);
if(lastComparison != 0) {
return lastComparison;
}
}
lastComparison = Boolean.valueOf(isSetTset()).compareTo(
other.isSetTset());
if(lastComparison != 0) {
return lastComparison;
}
if(isSetTset()) {
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tset,
other.tset);
if(lastComparison != 0) {
return lastComparison;
}
}
return 0;
}
public ComplexTObject deepCopy() {
return new ComplexTObject(this);
}
public boolean equals(ComplexTObject that) {
if(that == null)
return false;
boolean this_present_type = true && this.isSetType();
boolean that_present_type = true && that.isSetType();
if(this_present_type || that_present_type) {
if(!(this_present_type && that_present_type))
return false;
if(!this.type.equals(that.type))
return false;
}
boolean this_present_tscalar = true && this.isSetTscalar();
boolean that_present_tscalar = true && that.isSetTscalar();
if(this_present_tscalar || that_present_tscalar) {
if(!(this_present_tscalar && that_present_tscalar))
return false;
if(!this.tscalar.equals(that.tscalar))
return false;
}
boolean this_present_tmap = true && this.isSetTmap();
boolean that_present_tmap = true && that.isSetTmap();
if(this_present_tmap || that_present_tmap) {
if(!(this_present_tmap && that_present_tmap))
return false;
if(!this.tmap.equals(that.tmap))
return false;
}
boolean this_present_tlist = true && this.isSetTlist();
boolean that_present_tlist = true && that.isSetTlist();
if(this_present_tlist || that_present_tlist) {
if(!(this_present_tlist && that_present_tlist))
return false;
if(!this.tlist.equals(that.tlist))
return false;
}
boolean this_present_tset = true && this.isSetTset();
boolean that_present_tset = true && that.isSetTset();
if(this_present_tset || that_present_tset) {
if(!(this_present_tset && that_present_tset))
return false;
if(!this.tset.equals(that.tset))
return false;
}
return true;
}
@Override
public boolean equals(Object that) {
if(that == null)
return false;
if(that instanceof ComplexTObject)
return this.equals((ComplexTObject) that);
return false;
}
public _Fields fieldForId(int fieldId) {
return _Fields.findByThriftId(fieldId);
}
public Object getFieldValue(_Fields field) {
switch (field) {
case TYPE:
return getType();
case TSCALAR:
return getTscalar();
case TMAP:
return getTmap();
case TLIST:
return getTlist();
case TSET:
return getTset();
}
throw new IllegalStateException();
}
/**
* Return the canonical java object that is wrapped within this
* {@link ComplexTObject}.
*
* @return the wrapped java object
*/
public <T> T getJavaObject() {
if(this.cachedObject != null) {
return (T) this.cachedObject;
}
if(type == ComplexTObjectType.MAP) {
Map<ComplexTObject, ComplexTObject> tmap = getTmap();
Map<Object, Object> map = Maps.newLinkedHashMap();
for (Entry<ComplexTObject, ComplexTObject> entry : tmap.entrySet()) {
map.put(entry.getKey().getJavaObject(), entry.getValue()
.getJavaObject());
}
cachedObject = (T) map;
return (T) map;
}
else if(type == ComplexTObjectType.LIST) {
List<ComplexTObject> tlist = getTlist();
List<Object> list = Lists.newArrayListWithCapacity(tlist.size());
for (ComplexTObject elt : tlist) {
list.add(elt.getJavaObject());
}
cachedObject = (T) list;
return (T) list;
}
else if(type == ComplexTObjectType.SET) {
Set<ComplexTObject> tset = getTset();
Set<Object> set = Sets
.newLinkedHashSetWithExpectedSize(tset.size());
for (ComplexTObject elt : tset) {
set.add(elt.getJavaObject());
}
cachedObject = (T) set;
return (T) set;
}
else if(type == ComplexTObjectType.TOBJECT) {
cachedObject = (T) getTobject();
return (T) getTobject();
}
else if(type == ComplexTObjectType.TCRITERIA) {
cachedObject = (T) getTcriteria();
return (T) getTcriteria();
}
else {
TObject tscalar = getTscalar();
cachedObject = (T) Convert.thriftToJava(tscalar);
return (T) cachedObject;
}
}
public List<ComplexTObject> getTlist() {
return this.tlist;
}
public java.util.Iterator<ComplexTObject> getTlistIterator() {
return (this.tlist == null) ? null : this.tlist.iterator();
}
public int getTlistSize() {
return (this.tlist == null) ? 0 : this.tlist.size();
}
public Map<ComplexTObject, ComplexTObject> getTmap() {
return this.tmap;
}
public int getTmapSize() {
return (this.tmap == null) ? 0 : this.tmap.size();
}
public com.cinchapi.concourse.thrift.TObject getTscalar() {
return this.tscalar;
}
public Set<ComplexTObject> getTset() {
return this.tset;
}
public java.util.Iterator<ComplexTObject> getTsetIterator() {
return (this.tset == null) ? null : this.tset.iterator();
}
public int getTsetSize() {
return (this.tset == null) ? 0 : this.tset.size();
}
/**
*
* @see ComplexTObjectType
*/
public ComplexTObjectType getType() {
return this.type;
}
@Override
public int hashCode() {
List<Object> list = new ArrayList<Object>();
boolean present_type = true && (isSetType());
list.add(present_type);
if(present_type)
list.add(type.getValue());
boolean present_tscalar = true && (isSetTscalar());
list.add(present_tscalar);
if(present_tscalar)
list.add(tscalar);
boolean present_tmap = true && (isSetTmap());
list.add(present_tmap);
if(present_tmap)
list.add(tmap);
boolean present_tlist = true && (isSetTlist());
list.add(present_tlist);
if(present_tlist)
list.add(tlist);
boolean present_tset = true && (isSetTset());
list.add(present_tset);
if(present_tset)
list.add(tset);
return list.hashCode();
}
/**
* Returns true if field corresponding to fieldID is set (has been assigned
* a value) and false otherwise
*/
public boolean isSet(_Fields field) {
if(field == null) {
throw new IllegalArgumentException();
}
switch (field) {
case TYPE:
return isSetType();
case TSCALAR:
return isSetTscalar();
case TMAP:
return isSetTmap();
case TLIST:
return isSetTlist();
case TSET:
return isSetTset();
}
throw new IllegalStateException();
}
/**
* Returns true if field tlist is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetTlist() {
return this.tlist != null;
}
/**
* Returns true if field tmap is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetTmap() {
return this.tmap != null;
}
/**
* Returns true if field tscalar is set (has been assigned a value) and
* false otherwise
*/
public boolean isSetTscalar() {
return this.tscalar != null;
}
/**
* Returns true if field tset is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetTset() {
return this.tset != null;
}
/**
* Returns true if field type is set (has been assigned a value) and false
* otherwise
*/
public boolean isSetType() {
return this.type != null;
}
public void putToTmap(ComplexTObject key, ComplexTObject val) {
if(this.tmap == null) {
this.tmap = new HashMap<ComplexTObject, ComplexTObject>();
}
this.tmap.put(key, val);
}
public void read(org.apache.thrift.protocol.TProtocol iprot)
throws org.apache.thrift.TException {
schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
}
public void setFieldValue(_Fields field, Object value) {
switch (field) {
case TYPE:
if(value == null) {
unsetType();
}
else {
setType((ComplexTObjectType) value);
}
break;
case TSCALAR:
if(value == null) {
unsetTscalar();
}
else {
setTscalar((com.cinchapi.concourse.thrift.TObject) value);
}
break;
case TMAP:
if(value == null) {
unsetTmap();
}
else {
setTmap((Map<ComplexTObject, ComplexTObject>) value);
}
break;
case TLIST:
if(value == null) {
unsetTlist();
}
else {
setTlist((List<ComplexTObject>) value);
}
break;
case TSET:
if(value == null) {
unsetTset();
}
else {
setTset((Set<ComplexTObject>) value);
}
break;
}
}
public ComplexTObject setTlist(List<ComplexTObject> tlist) {
this.tlist = tlist;
return this;
}
public void setTlistIsSet(boolean value) {
if(!value) {
this.tlist = null;
}
}
public ComplexTObject setTmap(Map<ComplexTObject, ComplexTObject> tmap) {
this.tmap = tmap;
return this;
}
public void setTmapIsSet(boolean value) {
if(!value) {
this.tmap = null;
}
}
public ComplexTObject setTscalar(
com.cinchapi.concourse.thrift.TObject tscalar) {
this.tscalar = tscalar;
return this;
}
public void setTscalarIsSet(boolean value) {
if(!value) {
this.tscalar = null;
}
}
public ComplexTObject setTset(Set<ComplexTObject> tset) {
this.tset = tset;
return this;
}
public void setTsetIsSet(boolean value) {
if(!value) {
this.tset = null;
}
}
public ComplexTObject setTobject(
com.cinchapi.concourse.thrift.TObject tobject) {
this.tobject = tobject;
return this;
}
public void unsetTcriteria() {
this.tcriteria = null;
}
public TCriteria getTcriteria() {
return this.tcriteria;
}
public ComplexTObject setTcriteria(
com.cinchapi.concourse.thrift.TCriteria tcriteria) {
this.tcriteria = tcriteria;
return this;
}
public void unsetTobject() {
this.tobject = null;
}
public TObject getTobject() {
return this.tobject;
}
/**
*
* @see ComplexTObjectType
*/
public ComplexTObject setType(ComplexTObjectType type) {
this.type = type;
return this;
}
public void setTypeIsSet(boolean value) {
if(!value) {
this.type = null;
}
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("ComplexTObject(");
boolean first = true;
sb.append("type:");
if(this.type == null) {
sb.append("null");
}
else {
sb.append(this.type);
}
first = false;
if(isSetTscalar()) {
if(!first)
sb.append(", ");
sb.append("tscalar:");
if(this.tscalar == null) {
sb.append("null");
}
else {
sb.append(this.tscalar);
}
first = false;
}
if(isSetTmap()) {
if(!first)
sb.append(", ");
sb.append("tmap:");
if(this.tmap == null) {
sb.append("null");
}
else {
sb.append(this.tmap);
}
first = false;
}
if(isSetTlist()) {
if(!first)
sb.append(", ");
sb.append("tlist:");
if(this.tlist == null) {
sb.append("null");
}
else {
sb.append(this.tlist);
}
first = false;
}
if(isSetTset()) {
if(!first)
sb.append(", ");
sb.append("tset:");
if(this.tset == null) {
sb.append("null");
}
else {
sb.append(this.tset);
}
first = false;
}
sb.append(")");
return sb.toString();
}
public void unsetTlist() {
this.tlist = null;
}
public void unsetTmap() {
this.tmap = null;
}
public void unsetTscalar() {
this.tscalar = null;
}
public void unsetTset() {
this.tset = null;
}
public void unsetType() {
this.type = null;
}
public void validate() throws org.apache.thrift.TException {
// check for required fields
if(type == null) {
throw new org.apache.thrift.protocol.TProtocolException(
"Required field 'type' was not present! Struct: "
+ toString());
}
// check for sub-struct validity
if(tscalar != null) {
tscalar.validate();
}
}
public void write(org.apache.thrift.protocol.TProtocol oprot)
throws org.apache.thrift.TException {
schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
}
private void readObject(java.io.ObjectInputStream in)
throws java.io.IOException, ClassNotFoundException {
try {
read(new org.apache.thrift.protocol.TCompactProtocol(
new org.apache.thrift.transport.TIOStreamTransport(in)));
}
catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
private void writeObject(java.io.ObjectOutputStream out)
throws java.io.IOException {
try {
write(new org.apache.thrift.protocol.TCompactProtocol(
new org.apache.thrift.transport.TIOStreamTransport(out)));
}
catch (org.apache.thrift.TException te) {
throw new java.io.IOException(te);
}
}
/**
* The set of fields this struct contains, along with convenience methods
* for finding and manipulating them.
*/
public enum _Fields implements org.apache.thrift.TFieldIdEnum {
TLIST((short) 4, "tlist"),
TMAP((short) 3, "tmap"),
TSCALAR((short) 2, "tscalar"),
TSET((short) 5, "tset"),
/**
*
* @see ComplexTObjectType
*/
TYPE((short) 1, "type");
/**
* Find the _Fields constant that matches name, or null if its not
* found.
*/
public static _Fields findByName(String name) {
return byName.get(name);
}
/**
* Find the _Fields constant that matches fieldId, or null if its not
* found.
*/
public static _Fields findByThriftId(int fieldId) {
switch (fieldId) {
case 1: // TYPE
return TYPE;
case 2: // TSCALAR
return TSCALAR;
case 3: // TMAP
return TMAP;
case 4: // TLIST
return TLIST;
case 5: // TSET
return TSET;
default:
return null;
}
}
/**
* Find the _Fields constant that matches fieldId, throwing an exception
* if it is not found.
*/
public static _Fields findByThriftIdOrThrow(int fieldId) {
_Fields fields = findByThriftId(fieldId);
if(fields == null)
throw new IllegalArgumentException("Field " + fieldId
+ " doesn't exist!");
return fields;
}
private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
static {
for (_Fields field : EnumSet.allOf(_Fields.class)) {
byName.put(field.getFieldName(), field);
}
}
private final String _fieldName;
private final short _thriftId;
_Fields(short thriftId, String fieldName) {
_thriftId = thriftId;
_fieldName = fieldName;
}
public String getFieldName() {
return _fieldName;
}
public short getThriftFieldId() {
return _thriftId;
}
}
private static class ComplexTObjectStandardScheme extends
StandardScheme<ComplexTObject> {
public void read(org.apache.thrift.protocol.TProtocol iprot,
ComplexTObject struct) throws org.apache.thrift.TException {
org.apache.thrift.protocol.TField schemeField;
iprot.readStructBegin();
while (true) {
schemeField = iprot.readFieldBegin();
if(schemeField.type == org.apache.thrift.protocol.TType.STOP) {
break;
}
switch (schemeField.id) {
case 1: // TYPE
if(schemeField.type == org.apache.thrift.protocol.TType.I32) {
struct.type = com.cinchapi.concourse.thrift.ComplexTObjectType
.findByValue(iprot.readI32());
struct.setTypeIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
case 2: // TSCALAR
if(schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
struct.tscalar = new com.cinchapi.concourse.thrift.TObject();
struct.tscalar.read(iprot);
struct.setTscalarIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
case 3: // TMAP
if(schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
org.apache.thrift.protocol.TMap _map0 = iprot
.readMapBegin();
struct.tmap = new HashMap<ComplexTObject, ComplexTObject>(
2 * _map0.size);
ComplexTObject _key1;
ComplexTObject _val2;
for (int _i3 = 0; _i3 < _map0.size; ++_i3) {
_key1 = new ComplexTObject();
_key1.read(iprot);
_val2 = new ComplexTObject();
_val2.read(iprot);
struct.tmap.put(_key1, _val2);
}
iprot.readMapEnd();
}
struct.setTmapIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
case 4: // TLIST
if(schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
org.apache.thrift.protocol.TList _list4 = iprot
.readListBegin();
struct.tlist = new ArrayList<ComplexTObject>(
_list4.size);
ComplexTObject _elem5;
for (int _i6 = 0; _i6 < _list4.size; ++_i6) {
_elem5 = new ComplexTObject();
_elem5.read(iprot);
struct.tlist.add(_elem5);
}
iprot.readListEnd();
}
struct.setTlistIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
case 5: // TSET
if(schemeField.type == org.apache.thrift.protocol.TType.SET) {
{
org.apache.thrift.protocol.TSet _set7 = iprot
.readSetBegin();
struct.tset = new HashSet<ComplexTObject>(
2 * _set7.size);
ComplexTObject _elem8;
for (int _i9 = 0; _i9 < _set7.size; ++_i9) {
_elem8 = new ComplexTObject();
_elem8.read(iprot);
struct.tset.add(_elem8);
}
iprot.readSetEnd();
}
struct.setTsetIsSet(true);
}
else {
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
break;
default:
org.apache.thrift.protocol.TProtocolUtil.skip(iprot,
schemeField.type);
}
iprot.readFieldEnd();
}
iprot.readStructEnd();
// check for required fields of primitive type, which can't be
// checked in the validate method
struct.validate();
}
public void write(org.apache.thrift.protocol.TProtocol oprot,
ComplexTObject struct) throws org.apache.thrift.TException {
struct.validate();
oprot.writeStructBegin(STRUCT_DESC);
if(struct.type != null) {
oprot.writeFieldBegin(TYPE_FIELD_DESC);
oprot.writeI32(struct.type.getValue());
oprot.writeFieldEnd();
}
if(struct.tscalar != null) {
if(struct.isSetTscalar()) {
oprot.writeFieldBegin(TSCALAR_FIELD_DESC);
struct.tscalar.write(oprot);
oprot.writeFieldEnd();
}
}
if(struct.tmap != null) {
if(struct.isSetTmap()) {
oprot.writeFieldBegin(TMAP_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(
org.apache.thrift.protocol.TType.STRUCT,
org.apache.thrift.protocol.TType.STRUCT,
struct.tmap.size()));
for (Map.Entry<ComplexTObject, ComplexTObject> _iter10 : struct.tmap
.entrySet()) {
_iter10.getKey().write(oprot);
_iter10.getValue().write(oprot);
}
oprot.writeMapEnd();
}
oprot.writeFieldEnd();
}
}
if(struct.tlist != null) {
if(struct.isSetTlist()) {
oprot.writeFieldBegin(TLIST_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(
org.apache.thrift.protocol.TType.STRUCT,
struct.tlist.size()));
for (ComplexTObject _iter11 : struct.tlist) {
_iter11.write(oprot);
}
oprot.writeListEnd();
}
oprot.writeFieldEnd();
}
}
if(struct.tset != null) {
if(struct.isSetTset()) {
oprot.writeFieldBegin(TSET_FIELD_DESC);
{
oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(
org.apache.thrift.protocol.TType.STRUCT,
struct.tset.size()));
for (ComplexTObject _iter12 : struct.tset) {
_iter12.write(oprot);
}
oprot.writeSetEnd();
}
oprot.writeFieldEnd();
}
}
oprot.writeFieldStop();
oprot.writeStructEnd();
}
}
private static class ComplexTObjectStandardSchemeFactory implements
SchemeFactory {
public ComplexTObjectStandardScheme getScheme() {
return new ComplexTObjectStandardScheme();
}
}
private static class ComplexTObjectTupleScheme extends
TupleScheme<ComplexTObject> {
@Override
public void read(org.apache.thrift.protocol.TProtocol prot,
ComplexTObject struct) throws org.apache.thrift.TException {
TTupleProtocol iprot = (TTupleProtocol) prot;
struct.type = com.cinchapi.concourse.thrift.ComplexTObjectType
.findByValue(iprot.readI32());
struct.setTypeIsSet(true);
BitSet incoming = iprot.readBitSet(4);
if(incoming.get(0)) {
struct.tscalar = new com.cinchapi.concourse.thrift.TObject();
struct.tscalar.read(iprot);
struct.setTscalarIsSet(true);
}
if(incoming.get(1)) {
{
org.apache.thrift.protocol.TMap _map16 = new org.apache.thrift.protocol.TMap(
org.apache.thrift.protocol.TType.STRUCT,
org.apache.thrift.protocol.TType.STRUCT,
iprot.readI32());
struct.tmap = new HashMap<ComplexTObject, ComplexTObject>(
2 * _map16.size);
ComplexTObject _key17;
ComplexTObject _val18;
for (int _i19 = 0; _i19 < _map16.size; ++_i19) {
_key17 = new ComplexTObject();
_key17.read(iprot);
_val18 = new ComplexTObject();
_val18.read(iprot);
struct.tmap.put(_key17, _val18);
}
}
struct.setTmapIsSet(true);
}
if(incoming.get(2)) {
{
org.apache.thrift.protocol.TList _list20 = new org.apache.thrift.protocol.TList(
org.apache.thrift.protocol.TType.STRUCT,
iprot.readI32());
struct.tlist = new ArrayList<ComplexTObject>(_list20.size);
ComplexTObject _elem21;
for (int _i22 = 0; _i22 < _list20.size; ++_i22) {
_elem21 = new ComplexTObject();
_elem21.read(iprot);
struct.tlist.add(_elem21);
}
}
struct.setTlistIsSet(true);
}
if(incoming.get(3)) {
{
org.apache.thrift.protocol.TSet _set23 = new org.apache.thrift.protocol.TSet(
org.apache.thrift.protocol.TType.STRUCT,
iprot.readI32());
struct.tset = new HashSet<ComplexTObject>(2 * _set23.size);
ComplexTObject _elem24;
for (int _i25 = 0; _i25 < _set23.size; ++_i25) {
_elem24 = new ComplexTObject();
_elem24.read(iprot);
struct.tset.add(_elem24);
}
}
struct.setTsetIsSet(true);
}
}
@Override
public void write(org.apache.thrift.protocol.TProtocol prot,
ComplexTObject struct) throws org.apache.thrift.TException {
TTupleProtocol oprot = (TTupleProtocol) prot;
oprot.writeI32(struct.type.getValue());
BitSet optionals = new BitSet();
if(struct.isSetTscalar()) {
optionals.set(0);
}
if(struct.isSetTmap()) {
optionals.set(1);
}
if(struct.isSetTlist()) {
optionals.set(2);
}
if(struct.isSetTset()) {
optionals.set(3);
}
oprot.writeBitSet(optionals, 4);
if(struct.isSetTscalar()) {
struct.tscalar.write(oprot);
}
if(struct.isSetTmap()) {
{
oprot.writeI32(struct.tmap.size());
for (Map.Entry<ComplexTObject, ComplexTObject> _iter13 : struct.tmap
.entrySet()) {
_iter13.getKey().write(oprot);
_iter13.getValue().write(oprot);
}
}
}
if(struct.isSetTlist()) {
{
oprot.writeI32(struct.tlist.size());
for (ComplexTObject _iter14 : struct.tlist) {
_iter14.write(oprot);
}
}
}
if(struct.isSetTset()) {
{
oprot.writeI32(struct.tset.size());
for (ComplexTObject _iter15 : struct.tset) {
_iter15.write(oprot);
}
}
}
}
}
private static class ComplexTObjectTupleSchemeFactory implements
SchemeFactory {
public ComplexTObjectTupleScheme getScheme() {
return new ComplexTObjectTupleScheme();
}
}
}
|
package com.yahoo.vespa.model.admin.monitoring;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import static com.yahoo.vespa.model.admin.monitoring.DefaultVespaMetrics.defaultVespaMetricSet;
import static java.util.Collections.singleton;
/**
* Encapsulates vespa service metrics.
*
* @author gjoranv
*/
public class VespaMetricSet {
public static final MetricSet vespaMetricSet = new MetricSet("vespa",
getVespaMetrics(),
singleton(defaultVespaMetricSet));
private static Set<Metric> getVespaMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.addAll(getSearchNodeMetrics());
metrics.addAll(getStorageMetrics());
metrics.addAll(getDocprocMetrics());
metrics.addAll(getClusterControllerMetrics());
metrics.addAll(getQrserverMetrics());
metrics.addAll(getContainerMetrics());
metrics.addAll(getConfigServerMetrics());
metrics.addAll(getSentinelMetrics());
metrics.addAll(getOtherMetrics());
return Collections.unmodifiableSet(metrics);
}
private static Set<Metric> getSentinelMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("sentinel.restarts.count"));
metrics.add(new Metric("sentinel.totalRestarts.last"));
metrics.add(new Metric("sentinel.uptime.last"));
metrics.add(new Metric("sentinel.running.count"));
metrics.add(new Metric("sentinel.running.last"));
return metrics;
}
private static Set<Metric> getOtherMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("slobrok.heartbeats.failed.count"));
metrics.add(new Metric("logd.processed.lines.count"));
// Java (JRT) TLS metrics
metrics.add(new Metric("jrt.transport.tls-certificate-verification-failures"));
metrics.add(new Metric("jrt.transport.peer-authorization-failures"));
metrics.add(new Metric("jrt.transport.server.tls-connections-established"));
metrics.add(new Metric("jrt.transport.client.tls-connections-established"));
metrics.add(new Metric("jrt.transport.server.unencrypted-connections-established"));
metrics.add(new Metric("jrt.transport.client.unencrypted-connections-established"));
// C++ TLS metrics
metrics.add(new Metric("vds.server.network.tls-handshakes-failed"));
metrics.add(new Metric("vds.server.network.peer-authorization-failures"));
metrics.add(new Metric("vds.server.network.client.tls-connections-established"));
metrics.add(new Metric("vds.server.network.server.tls-connections-established"));
metrics.add(new Metric("vds.server.network.client.insecure-connections-established"));
metrics.add(new Metric("vds.server.network.server.insecure-connections-established"));
metrics.add(new Metric("vds.server.network.tls-connections-broken"));
metrics.add(new Metric("vds.server.network.failed-tls-config-reloads"));
// C++ Fnet metrics
metrics.add(new Metric("vds.server.fnet.num-connections"));
return metrics;
}
private static Set<Metric> getConfigServerMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("configserver.requests.count"));
metrics.add(new Metric("configserver.failedRequests.count"));
metrics.add(new Metric("configserver.latency.max"));
metrics.add(new Metric("configserver.latency.sum"));
metrics.add(new Metric("configserver.latency.count"));
metrics.add(new Metric("configserver.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("configserver.cacheConfigElems.last"));
metrics.add(new Metric("configserver.cacheChecksumElems.last"));
metrics.add(new Metric("configserver.hosts.last"));
metrics.add(new Metric("configserver.delayedResponses.count"));
metrics.add(new Metric("configserver.sessionChangeErrors.count"));
metrics.add(new Metric("configserver.zkZNodes.last"));
metrics.add(new Metric("configserver.zkAvgLatency.last"));
metrics.add(new Metric("configserver.zkMaxLatency.last"));
metrics.add(new Metric("configserver.zkConnections.last"));
metrics.add(new Metric("configserver.zkOutstandingRequests.last"));
return metrics;
}
private static Set<Metric> getContainerMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("handled.requests.count"));
metrics.add(new Metric("handled.latency.max"));
metrics.add(new Metric("handled.latency.sum"));
metrics.add(new Metric("handled.latency.count"));
metrics.add(new Metric("handled.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverRejectedRequests.rate"));
metrics.add(new Metric("serverRejectedRequests.count"));
metrics.add(new Metric("serverThreadPoolSize.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.min")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.max"));
metrics.add(new Metric("serverThreadPoolSize.rate")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverThreadPoolSize.last"));
metrics.add(new Metric("serverActiveThreads.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverActiveThreads.min"));
metrics.add(new Metric("serverActiveThreads.max"));
metrics.add(new Metric("serverActiveThreads.rate")); // TODO: Remove in Vespa 8
metrics.add(new Metric("serverActiveThreads.sum"));
metrics.add(new Metric("serverActiveThreads.count"));
metrics.add(new Metric("serverActiveThreads.last"));
metrics.add(new Metric("httpapi_latency.max"));
metrics.add(new Metric("httpapi_latency.sum"));
metrics.add(new Metric("httpapi_latency.count"));
metrics.add(new Metric("httpapi_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("httpapi_pending.max"));
metrics.add(new Metric("httpapi_pending.sum"));
metrics.add(new Metric("httpapi_pending.count"));
metrics.add(new Metric("httpapi_pending.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("httpapi_num_operations.rate"));
metrics.add(new Metric("httpapi_num_updates.rate"));
metrics.add(new Metric("httpapi_num_removes.rate"));
metrics.add(new Metric("httpapi_num_puts.rate"));
metrics.add(new Metric("httpapi_succeeded.rate"));
metrics.add(new Metric("httpapi_failed.rate"));
metrics.add(new Metric("httpapi_parse_error.rate"));
metrics.add(new Metric("mem.heap.total.average"));
metrics.add(new Metric("mem.heap.free.average"));
metrics.add(new Metric("mem.heap.used.average"));
metrics.add(new Metric("mem.heap.used.max"));
metrics.add(new Metric("jdisc.memory_mappings.max"));
metrics.add(new Metric("jdisc.open_file_descriptors.max"));
metrics.add(new Metric("jdisc.gc.count.average"));
metrics.add(new Metric("jdisc.gc.count.max"));
metrics.add(new Metric("jdisc.gc.count.last"));
metrics.add(new Metric("jdisc.gc.ms.average"));
metrics.add(new Metric("jdisc.gc.ms.max"));
metrics.add(new Metric("jdisc.gc.ms.last"));
metrics.add(new Metric("jdisc.deactivated_containers.total.last"));
metrics.add(new Metric("jdisc.deactivated_containers.with_retained_refs.last"));
metrics.add(new Metric("athenz-tenant-cert.expiry.seconds.last"));
metrics.add(new Metric("jdisc.http.request.prematurely_closed.rate"));
metrics.add(new Metric("http.status.1xx.rate"));
metrics.add(new Metric("http.status.2xx.rate"));
metrics.add(new Metric("http.status.3xx.rate"));
metrics.add(new Metric("http.status.4xx.rate"));
metrics.add(new Metric("http.status.5xx.rate"));
metrics.add(new Metric("http.status.401.rate"));
metrics.add(new Metric("http.status.403.rate"));
metrics.add(new Metric("jdisc.http.request.uri_length.max"));
metrics.add(new Metric("jdisc.http.request.uri_length.sum"));
metrics.add(new Metric("jdisc.http.request.uri_length.count"));
metrics.add(new Metric("jdisc.http.request.uri_length.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("jdisc.http.request.content_size.max"));
metrics.add(new Metric("jdisc.http.request.content_size.sum"));
metrics.add(new Metric("jdisc.http.request.content_size.count"));
metrics.add(new Metric("jdisc.http.request.content_size.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.missing_client_cert.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.invalid_client_cert.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.incompatible_protocols.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.incompatible_ciphers.rate"));
metrics.add(new Metric("jdisc.http.ssl.handshake.failure.unknown.rate"));
return metrics;
}
private static Set<Metric> getClusterControllerMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("cluster-controller.down.count.last"));
metrics.add(new Metric("cluster-controller.initializing.count.last"));
metrics.add(new Metric("cluster-controller.maintenance.count.last"));
metrics.add(new Metric("cluster-controller.retired.count.last"));
metrics.add(new Metric("cluster-controller.stopping.count.last"));
metrics.add(new Metric("cluster-controller.up.count.last"));
metrics.add(new Metric("cluster-controller.cluster-state-change.count"));
metrics.add(new Metric("cluster-controller.is-master.last"));
// TODO(hakonhall): Update this name once persistent "count" metrics has been implemented.
// DO NOT RELY ON THIS METRIC YET.
metrics.add(new Metric("cluster-controller.node-event.count"));
return metrics;
}
private static Set<Metric> getDocprocMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// per chain
metrics.add(new Metric("documents_processed.rate"));
return metrics;
}
private static Set<Metric> getQrserverMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("peak_qps.max"));
metrics.add(new Metric("search_connections.max"));
metrics.add(new Metric("search_connections.sum"));
metrics.add(new Metric("search_connections.count"));
metrics.add(new Metric("search_connections.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("active_queries.max"));
metrics.add(new Metric("active_queries.sum"));
metrics.add(new Metric("active_queries.count"));
metrics.add(new Metric("active_queries.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("feed.latency.max"));
metrics.add(new Metric("feed.latency.sum"));
metrics.add(new Metric("feed.latency.count"));
metrics.add(new Metric("feed.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("feed.http-requests.count"));
metrics.add(new Metric("feed.http-requests.rate"));
metrics.add(new Metric("queries.rate"));
metrics.add(new Metric("query_container_latency.max"));
metrics.add(new Metric("query_container_latency.sum"));
metrics.add(new Metric("query_container_latency.count"));
metrics.add(new Metric("query_container_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("query_latency.max"));
metrics.add(new Metric("query_latency.sum"));
metrics.add(new Metric("query_latency.count"));
metrics.add(new Metric("query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("query_latency.95percentile"));
metrics.add(new Metric("query_latency.99percentile"));
metrics.add(new Metric("failed_queries.rate"));
metrics.add(new Metric("degraded_queries.rate"));
metrics.add(new Metric("hits_per_query.max"));
metrics.add(new Metric("hits_per_query.sum"));
metrics.add(new Metric("hits_per_query.count"));
metrics.add(new Metric("hits_per_query.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("query_hit_offset.max"));
metrics.add(new Metric("query_hit_offset.sum"));
metrics.add(new Metric("query_hit_offset.count"));
metrics.add(new Metric("documents_covered.count"));
metrics.add(new Metric("documents_total.count"));
metrics.add(new Metric("dispatch_internal.rate"));
metrics.add(new Metric("dispatch_fdispatch.rate"));
metrics.add(new Metric("totalhits_per_query.max"));
metrics.add(new Metric("totalhits_per_query.sum"));
metrics.add(new Metric("totalhits_per_query.count"));
metrics.add(new Metric("totalhits_per_query.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("empty_results.rate"));
metrics.add(new Metric("requestsOverQuota.rate"));
metrics.add(new Metric("requestsOverQuota.count"));
metrics.add(new Metric("relevance.at_1.sum"));
metrics.add(new Metric("relevance.at_1.count"));
metrics.add(new Metric("relevance.at_1.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("relevance.at_3.sum"));
metrics.add(new Metric("relevance.at_3.count"));
metrics.add(new Metric("relevance.at_3.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("relevance.at_10.sum"));
metrics.add(new Metric("relevance.at_10.count"));
metrics.add(new Metric("relevance.at_10.average")); // TODO: Remove in Vespa 8
// Errors from qrserver
metrics.add(new Metric("error.timeout.rate"));
metrics.add(new Metric("error.backends_oos.rate"));
metrics.add(new Metric("error.plugin_failure.rate"));
metrics.add(new Metric("error.backend_communication_error.rate"));
metrics.add(new Metric("error.empty_document_summaries.rate"));
metrics.add(new Metric("error.invalid_query_parameter.rate"));
metrics.add(new Metric("error.internal_server_error.rate"));
metrics.add(new Metric("error.misconfigured_server.rate"));
metrics.add(new Metric("error.invalid_query_transformation.rate"));
metrics.add(new Metric("error.result_with_errors.rate"));
metrics.add(new Metric("error.unspecified.rate"));
metrics.add(new Metric("error.unhandled_exception.rate"));
return metrics;
}
private static Set<Metric> getSearchNodeMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
metrics.add(new Metric("content.proton.documentdb.documents.total.last"));
metrics.add(new Metric("content.proton.documentdb.documents.ready.last"));
metrics.add(new Metric("content.proton.documentdb.documents.active.last"));
metrics.add(new Metric("content.proton.documentdb.documents.removed.last"));
metrics.add(new Metric("content.proton.documentdb.index.docs_in_memory.last"));
metrics.add(new Metric("content.proton.documentdb.disk_usage.last"));
metrics.add(new Metric("content.proton.documentdb.memory_usage.allocated_bytes.max"));
metrics.add(new Metric("content.proton.transport.query.count.rate"));
metrics.add(new Metric("content.proton.docsum.docs.rate"));
metrics.add(new Metric("content.proton.docsum.latency.max"));
metrics.add(new Metric("content.proton.docsum.latency.sum"));
metrics.add(new Metric("content.proton.docsum.latency.count"));
metrics.add(new Metric("content.proton.docsum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.transport.query.latency.max"));
metrics.add(new Metric("content.proton.transport.query.latency.sum"));
metrics.add(new Metric("content.proton.transport.query.latency.count"));
metrics.add(new Metric("content.proton.transport.query.latency.average")); // TODO: Remove in Vespa 8
// Search protocol
metrics.add(new Metric("content.proton.search_protocol.query.latency.max"));
metrics.add(new Metric("content.proton.search_protocol.query.latency.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.latency.count"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.max"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.request_size.count"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.max"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.query.reply_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.latency.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.request_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.max"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.sum"));
metrics.add(new Metric("content.proton.search_protocol.docsum.reply_size.count"));
metrics.add(new Metric("content.proton.search_protocol.docsum.requested_documents.count"));
// Executors shared between all document dbs
metrics.add(new Metric("content.proton.executor.proton.maxpending.last"));
metrics.add(new Metric("content.proton.executor.proton.accepted.rate"));
metrics.add(new Metric("content.proton.executor.flush.maxpending.last"));
metrics.add(new Metric("content.proton.executor.flush.accepted.rate"));
metrics.add(new Metric("content.proton.executor.match.maxpending.last"));
metrics.add(new Metric("content.proton.executor.match.accepted.rate"));
metrics.add(new Metric("content.proton.executor.docsum.maxpending.last"));
metrics.add(new Metric("content.proton.executor.docsum.accepted.rate"));
metrics.add(new Metric("content.proton.executor.shared.maxpending.last"));
metrics.add(new Metric("content.proton.executor.shared.accepted.rate"));
metrics.add(new Metric("content.proton.executor.warmup.maxpending.last"));
metrics.add(new Metric("content.proton.executor.warmup.accepted.rate"));
// jobs
metrics.add(new Metric("content.proton.documentdb.job.total.average"));
metrics.add(new Metric("content.proton.documentdb.job.attribute_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.memory_index_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.disk_index_fusion.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_flush.average"));
metrics.add(new Metric("content.proton.documentdb.job.document_store_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.bucket_move.average"));
metrics.add(new Metric("content.proton.documentdb.job.lid_space_compact.average"));
metrics.add(new Metric("content.proton.documentdb.job.removed_documents_prune.average"));
// Threading service (per document db)
metrics.add(new Metric("content.proton.documentdb.threading_service.master.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.master.accepted.rate"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index.accepted.rate"));
metrics.add(new Metric("content.proton.documentdb.threading_service.summary.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.summary.accepted.rate"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_inverter.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_inverter.accepted.rate"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.index_field_writer.accepted.rate"));
metrics.add(new Metric("content.proton.documentdb.threading_service.attribute_field_writer.maxpending.last"));
metrics.add(new Metric("content.proton.documentdb.threading_service.attribute_field_writer.accepted.rate"));
// lid space
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_bloat_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_fragmentation_factor.average"));
metrics.add(new Metric("content.proton.documentdb.ready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.notready.lid_space.lid_limit.last"));
metrics.add(new Metric("content.proton.documentdb.removed.lid_space.lid_limit.last"));
// resource usage
metrics.add(new Metric("content.proton.resource_usage.disk.average"));
metrics.add(new Metric("content.proton.resource_usage.disk_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_utilization.average"));
metrics.add(new Metric("content.proton.resource_usage.memory_mappings.max"));
metrics.add(new Metric("content.proton.resource_usage.open_file_descriptors.max"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.enum_store.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.multi_value.average"));
metrics.add(new Metric("content.proton.documentdb.attribute.resource_usage.feeding_blocked.last"));
// transaction log
metrics.add(new Metric("content.proton.transactionlog.entries.average"));
metrics.add(new Metric("content.proton.transactionlog.disk_usage.average"));
metrics.add(new Metric("content.proton.transactionlog.replay_time.last"));
// document store
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_usage.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.disk_bloat.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.max_bucket_spread.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.removed.document_store.memory_usage.onhold_bytes.average"));
// document store cache
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.memory_usage.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.hit_rate.average"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.lookups.rate"));
metrics.add(new Metric("content.proton.documentdb.ready.document_store.cache.invalidations.rate"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.memory_usage.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.hit_rate.average"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.lookups.rate"));
metrics.add(new Metric("content.proton.documentdb.notready.document_store.cache.invalidations.rate"));
// attribute
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.ready.attribute.memory_usage.onhold_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.notready.attribute.memory_usage.onhold_bytes.average"));
// index
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.allocated_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.used_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.dead_bytes.average"));
metrics.add(new Metric("content.proton.documentdb.index.memory_usage.onhold_bytes.average"));
// matching
metrics.add(new Metric("content.proton.documentdb.matching.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.soft_doomed_queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.max"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.count"));
metrics.add(new Metric("content.proton.documentdb.matching.query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.max")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.sum")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_collateral_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.query_setup_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.docs_matched.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doomed_queries.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.min"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.soft_doom_factor.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.max")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.sum")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.count")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_collateral_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.query_setup_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.max"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.sum"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.count"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.rerank_time.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.docs_matched.rate"));
metrics.add(new Metric("content.proton.documentdb.matching.rank_profile.limited_queries.rate"));
return metrics;
}
private static Set<Metric> getStorageMetrics() {
Set<Metric> metrics = new LinkedHashSet<>();
// TODO: For the purpose of this file and likely elsewhere, all but the last aggregate specifier,
// TODO: such as 'average' and 'sum' in the metric names below are just confusing and can be mentally
// TODO: disregarded when considering metric names. Consider cleaning up for Vespa 8.
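// Example (illustrative, not from the original note): a name like "vds.filestor.alldisks.averagequeuewait.sum.average"
// effectively reads as the average queue wait across all disks; the intermediate "sum" specifier can be ignored.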
metrics.add(new Metric("vds.datastored.alldisks.docs.average"));
metrics.add(new Metric("vds.datastored.alldisks.bytes.average"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagevisitorlifetime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagequeuewait.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.max"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.sum"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.count"));
metrics.add(new Metric("vds.filestor.alldisks.queuesize.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.max"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.sum"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.count"));
metrics.add(new Metric("vds.filestor.alldisks.averagequeuewait.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.max"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.sum"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.count"));
metrics.add(new Metric("vds.visitor.allthreads.queuesize.count.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.completed.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.created.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.failed.sum.rate"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averagemessagesendtime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.max"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.sum"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.count"));
metrics.add(new Metric("vds.visitor.allthreads.averageprocessingtime.sum.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.put.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.get.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.update.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.createiterator.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.visit.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.remove_location.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.splitbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.joinbuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.count.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.failed.rate"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.max"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.sum"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.count"));
metrics.add(new Metric("vds.filestor.alldisks.allthreads.deletebuckets.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.filestor.alldisks.allthreads.setbucketstates.count.rate"));
// Distributor
metrics.add(new Metric("vds.idealstate.buckets_rechecking.average"));
metrics.add(new Metric("vds.idealstate.idealstate_diff.average"));
metrics.add(new Metric("vds.idealstate.buckets_toofewcopies.average"));
metrics.add(new Metric("vds.idealstate.buckets_toomanycopies.average"));
metrics.add(new Metric("vds.idealstate.buckets.average"));
metrics.add(new Metric("vds.idealstate.buckets_notrusted.average"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.delete_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.delete_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.merge_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.split_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.split_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.join_bucket.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.join_bucket.pending.average"));
metrics.add(new Metric("vds.idealstate.garbage_collection.done_ok.rate"));
metrics.add(new Metric("vds.idealstate.garbage_collection.done_failed.rate"));
metrics.add(new Metric("vds.idealstate.garbage_collection.pending.average"));
metrics.add(new Metric("vds.idealstate.garbage_collection.documents_removed.count"));
metrics.add(new Metric("vds.idealstate.garbage_collection.documents_removed.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.max"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.count"));
metrics.add(new Metric("vds.distributor.puts.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.puts.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.puts.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.max"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.count"));
metrics.add(new Metric("vds.distributor.removes.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.removes.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.removes.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.max"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.count"));
metrics.add(new Metric("vds.distributor.updates.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.updates.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.failures.test_and_set_failed.rate"));
metrics.add(new Metric("vds.distributor.updates.sum.diverging_timestamp_updates.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.removelocations.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.max"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.count"));
metrics.add(new Metric("vds.distributor.gets.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.gets.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.gets.sum.failures.notfound.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.max"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.sum"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.count"));
metrics.add(new Metric("vds.distributor.visitor.sum.latency.average")); // TODO: Remove in Vespa 8
metrics.add(new Metric("vds.distributor.visitor.sum.ok.rate"));
metrics.add(new Metric("vds.distributor.visitor.sum.failures.total.rate"));
metrics.add(new Metric("vds.distributor.docsstored.average"));
metrics.add(new Metric("vds.distributor.bytesstored.average"));
metrics.add(new Metric("vds.bouncer.clock_skew_aborts.count"));
return metrics;
}
}
|
package xhl.core;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.List;
import java.util.ListIterator;
import java.util.Stack;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import xhl.core.Token.TokenType;
import xhl.core.elements.Position;
import com.google.common.collect.ImmutableMap;
import static xhl.core.Token.TokenType.*;
import static com.google.common.collect.Lists.newArrayListWithExpectedSize;
/**
* Lexical analyzer for XHL
*
* @author Sergej Chodarev
*/
class Lexer {
// Regular expressions and maps for tokens
private static final Pattern numberRx = Pattern.compile("-?\\d+(\\.\\d*)?");
private static final Pattern operatorRx = Pattern.compile("[-+*/_=<>?!:&|]+");
private static final Pattern symbolRx = Pattern.compile("[a-zA-Z]\\w*");
private static final ImmutableMap<Character, TokenType> simpleTokens =
new ImmutableMap.Builder<Character, TokenType>()
.put('(', PAR_OPEN)
.put(')', PAR_CLOSE)
.put('[', BRACKET_OPEN)
.put(']', BRACKET_CLOSE)
.put('{', BRACE_OPEN)
.put('}', BRACE_CLOSE)
.put(',', COMMA)
.put('.', DOT)
.build();
private static final String OPEN = "([{";
private static final String CLOSE = "}])";
// Input stream
private final BufferedReader input;
// Currently processed line
private String line;
// Cursor position
private int lineN = 0;
private int columnN = 0;
// Lexer state
private final Stack<Integer> indent = new Stack<Integer>();
private int braceLevel = 0;
// List of tokens
private final List<Token> tokens = newArrayListWithExpectedSize(120);
private final ListIterator<Token> tokensIterator;
/**
* Initialize lexical analyzer and analyze text in input stream.
*
* @param input
* Input stream.
* @throws IOException
*/
public Lexer(Reader input) throws IOException {
this.input = new BufferedReader(input);
indent.push(0);
readTokens();
tokensIterator = tokens.listIterator();
}
/**
* Get next token.
*
* @return Next token or <code>null</code> if end of file was reached.
*/
public Token nextToken() {
if (tokensIterator.hasNext())
return tokensIterator.next();
else
return null;
}
/** Get next token without removing it from the list. */
public Token checkNextToken() {
if (tokensIterator.hasNext())
return tokens.get(tokensIterator.nextIndex());
else
return null;
}
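/*
 * Usage sketch (illustrative; the StringReader input and the loop are assumptions,
 * only the constructor and nextToken() are taken from this class):
 *
 *     Lexer lexer = new Lexer(new java.io.StringReader("x = 1 + 2"));
 *     for (Token t = lexer.nextToken(); t != null; t = lexer.nextToken()) {
 *         // dispatch on the token, e.g. by its TokenType
 *     }
 */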
/**
* Read all tokens from input stream.
*/
private void readTokens() throws IOException {
line = input.readLine();
while (line != null) {
lineN++;
columnN = 0;
// Read indentation
int indentation = readIndentation();
// Skip empty line
if (endOfLine()) {
line = input.readLine();
continue;
}
// Add INDENT or DEDENT tokens
if (braceLevel == 0)
processIndentation(indentation);
// Read tokens on the line
while (!endOfLine()) {
tokens.add(readToken());
skipSpace();
}
// End of line
if (braceLevel == 0)
tokens.add(new Token(LINEEND, getPosition()));
// Next line
line = input.readLine();
}
// DEDENT at the end of file
while (indent.peek() > 0) {
indent.pop();
tokens.add(new Token(DEDENT, getPosition()));
}
}
/**
* Generate INDENT or DEDENT tokens for indentation change.
*
* @param indentation
* Level of indentation.
*/
private void processIndentation(int indentation) {
if (indentation > indent.peek()) {
indent.push(indentation);
tokens.add(new Token(INDENT, getPosition()));
} else if (indentation < indent.peek()) {
while (indentation < indent.peek()) {
indent.pop();
tokens.add(new Token(DEDENT, getPosition()));
}
// TODO report an indentation error if the levels do not match
}
}
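/*
 * Worked example (illustrative input): for
 *
 *     items:
 *         first
 *         second
 *     done
 *
 * the lexer emits roughly SYMBOL(items) OPERATOR(:) LINEEND INDENT SYMBOL(first) LINEEND
 * SYMBOL(second) LINEEND DEDENT SYMBOL(done) LINEEND, since indentation changes are only
 * compared against the top of the indent stack.
 */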
/**
* Is end of line reached?
*
* Comment also ends line.
*/
private boolean endOfLine() {
return (columnN == line.length() || line.charAt(columnN) == '#' || line
.charAt(columnN) == ';'); // ; only for backwards compatibility
}
/**
* Read spaces and count the amount of indentation.
*
* @return indentation level.
*/
private int readIndentation() {
char c;
int indentation = 0;
while (!endOfLine()) {
c = line.charAt(columnN);
switch (c) {
case ' ':
indentation++;
break;
case '\t':
indentation += 8;
indentation -= indentation % 8;
break;
default:
return indentation;
}
columnN++;
}
return indentation;
}
private void skipSpace() {
while (!endOfLine() && (" \t".indexOf(line.charAt(columnN)) != -1))
columnN++;
}
/**
* Try to match regular expression at the current cursor position.
*
* @param rx
* Regular expression
* @return Matched text or <code>null</code> if regular expression did not
* match.
*/
private String matchRx(Pattern rx) {
Matcher m = rx.matcher(line.substring(columnN));
if (m.lookingAt()) {
columnN += m.end();
return m.group();
} else
return null;
}
/**
* Read one token at current position.
*/
private Token readToken() {
if (line.charAt(columnN) == '"')
return readString();
char ch = line.charAt(columnN);
// Punctuation
if (simpleTokens.containsKey(ch)) {
TokenType type = simpleTokens.get(ch);
if (OPEN.indexOf(ch) != -1)
braceLevel++;
else if (CLOSE.indexOf(ch) != -1)
braceLevel--;
columnN++;
return new Token(type, getPosition());
}
String text = matchRx(numberRx);
if (text != null)
return new Token(NUMBER, Double.valueOf(text), getPosition());
text = matchRx(operatorRx);
if (text != null)
return new Token(OPERATOR, text, getPosition());
text = matchRx(symbolRx);
if (text != null) {
if (text.equals("true"))
return new Token(TRUE, getPosition());
if (text.equals("false"))
return new Token(FALSE, getPosition());
if (text.equals("none"))
return new Token(NONE, getPosition());
else
return new Token(SYMBOL, text, getPosition());
}
return null;
}
/** Read string token. */
private Token readString() {
StringBuilder sb = new StringBuilder();
char ch;
columnN++;
while (columnN < line.length()) {
ch = line.charAt(columnN);
if (ch == '"')
break;
sb.append(ch);
columnN++;
}
// TODO: Error on unclosed string
columnN++;
return new Token(STRING, sb.toString(), getPosition());
}
private Position getPosition() {
return new Position("input", lineN, columnN); // FIXME filename
}
}
|
import java.awt.geom.*;
import java.util.*;
public class PrioritySearchTree {
PSTNode[] heap;
public PrioritySearchTree(ArrayList<PSTPoint> points) {
if(points == null) return;
Collections.sort(points); // Sort by y-coordinate in increasing order
this.heap = new PSTNode[heapSize(treeHeight(points.size()))];
buildTree(0,points);
}
private void buildTree(int rootIndex, ArrayList<PSTPoint> points) {
if(points == null || points.size() < 1) return;
// Since points are ordered by y increasing, smallest is first
PSTPoint rootPoint = points.get(0);
// Find a median-like X value
// - approximated by the average X value of the non-root points
double sumX = 0.0d;
for(PSTPoint p : points) {
sumX += p.getX();
}
sumX -= rootPoint.getX();
double medianX = sumX/(points.size()-1);
// Set the root node
heap[rootIndex] = new PSTNode(rootPoint,medianX);
// Bisect the non-root points into two arrays above and below the median
ArrayList<PSTPoint> upperPoints = new ArrayList<PSTPoint>();
ArrayList<PSTPoint> lowerPoints = new ArrayList<PSTPoint>();
for(PSTPoint p : points) {
if(p == rootPoint) continue;
// note: if p.x is equal to median, it will be added to left child
else if(p.getX() <= medianX) lowerPoints.add(p);
else upperPoints.add(p);
}
if(lowerPoints.size() > 0) buildTree(indexOfLeftChild(rootIndex),lowerPoints);
if(upperPoints.size() > 0) buildTree(indexOfRightChild(rootIndex),upperPoints);
}
public ArrayList<PSTPoint> findAllPointsWithin(double x1,
double x2, double y2) {
return findAllPointsWithin(x1,x2,y2,new ArrayList<PSTPoint>(),0);
}
public ArrayList<PSTPoint> findAllPointsWithin(double x1, double y1,
double x2, double y2) {
return findAllPointsWithin(x1,y1,x2,y2,new ArrayList<PSTPoint>(),0);
}
public ArrayList<PSTPoint> findAllPointsWithin(double x1, double y1,
double x2, double y2,
ArrayList<PSTPoint> list,
int rootIndex) {
PSTNode node = heap[rootIndex];
if(node == null) return list;
double nodeY = node.getY();
double nodeX = node.getX();
double nodeR = node.getMedianX();
if(nodeY < y1) {
// nodeR >= points in left tree >= x1
if(nodeR >= x1)
findAllPointsWithin(x1,y1,x2,y2,list,indexOfLeftChild(rootIndex));
// nodeR < points in right tree <= x2
if(nodeR < x2)
findAllPointsWithin(x1,y1,x2,y2,list,indexOfRightChild(rootIndex));
} else {
// Now that nodeY >= y1, we can do a 3 bounded search
findAllPointsWithin(x1,x2,y2,list,rootIndex);
}
return list;
}
// Note that as y2 and x2 approach positive infinity and
// x1 approaches negative infinity, this search visits more nodes.
// In the worst case, all nodes are visited.
public ArrayList<PSTPoint> findAllPointsWithin(double x1,
double x2, double y2,
ArrayList<PSTPoint> list,
int rootIndex) {
PSTNode node = heap[rootIndex];
if(node == null) return list;
double nodeX = node.getX();
double nodeY = node.getY();
double nodeR = node.getMedianX();
if(nodeY <= y2) {
if(nodeX >= x1 && nodeX <= x2) {
list.add(node.getPoint());
}
// nodeR >= points in left tree >= x1
if(nodeR >= x1)
findAllPointsWithin(x1,x2,y2,list,indexOfLeftChild(rootIndex));
// nodeR < points in right tree <= x2
if(nodeR < x2)
findAllPointsWithin(x1,x2,y2,list,indexOfRightChild(rootIndex));
}
return list;
}
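// Illustrative reading of the two searches above: the 4-argument form returns points with
// x1 <= x <= x2 and y1 <= y <= y2, while the 3-argument form drops the lower y bound
// (x1 <= x <= x2 and y <= y2); once a node satisfies y >= y1, all of its descendants do as well,
// which is why the 4-argument form can hand off to the 3-bounded search.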
public double minX() throws EmptyTreeException {
int index = 0;
if(heap[index] == null) throw new EmptyTreeException();
double min = heap[index].getX();
while(indexOfLeftChild(index) < heap.length &&
heap[indexOfLeftChild(index)] != null) {
index = indexOfLeftChild(index);
if(heap[index].getX() < min)
min = heap[index].getX();
}
return min;
}
public double maxX() throws EmptyTreeException {
int index = 0;
if(heap[index] == null) throw new EmptyTreeException();
double max = heap[index].getX();
while(indexOfRightChild(index) < heap.length &&
heap[indexOfRightChild(index)] != null) {
index = indexOfRightChild(index);
if(heap[index].getX() > max)
max = heap[index].getX();
}
return max;
}
public double minY() throws EmptyTreeException {
if(heap[0] == null) throw new EmptyTreeException();
return heap[0].getY();
}
public double maxY() throws EmptyTreeException {
if(heap[0] == null) throw new EmptyTreeException();
return maxY(0);
}
private double maxY(int index) {
double max = heap[index].getY();
if(indexOfRightChild(index) < heap.length) {
if(heap[indexOfLeftChild(index)] == null &&
heap[indexOfRightChild(index)] != null) {
max = maxY(indexOfRightChild(index));
} else if(heap[indexOfLeftChild(index)] != null &&
heap[indexOfRightChild(index)] == null) {
max = maxY(indexOfLeftChild(index));
} else if(heap[indexOfLeftChild(index)] != null &&
heap[indexOfRightChild(index)] != null) {
double maxLeft = maxY(indexOfLeftChild(index));
double maxRight = maxY(indexOfRightChild(index));
if(maxLeft > maxRight) max = maxLeft;
else max = maxRight;
}
}
return max;
}
// Determine the height of a balanced tree with n elements
private static int treeHeight(int n) {
return doubleToInt(Math.ceil(Math.log(n+1)/Math.log(2))-1);
}
// Determine the max number of heap nodes in a tree of given height
private static int heapSize(int height) {
return doubleToInt(Math.pow(2, height + 1)-1);
}
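// Worked example (illustrative): for n = 12 points, treeHeight(12) = ceil(log2(13)) - 1 = 3,
// so heapSize(3) = 2^4 - 1 = 15 heap slots are allocated, enough for a complete tree of height 3.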
private static int indexOfLeftChild(int rootIndex) {
return (2*rootIndex)+1;
}
private static int indexOfRightChild(int rootIndex) {
return (2*rootIndex)+2;
}
private static int doubleToInt(double d) {
return (int) d;
}
private static void printList(ArrayList<PSTPoint> points) {
for(PSTPoint p : points) System.out.print(p + " ");
System.out.println();
}
public static void main(String[] args) throws EmptyTreeException {
// Test construction
new PrioritySearchTree(null);
ArrayList<PSTPoint> testPoints = new ArrayList<PSTPoint>();
testPoints.add(new PSTPoint(1.0d,1.0d));
testPoints.add(new PSTPoint(2.0d,5.0d));
testPoints.add(new PSTPoint(3.0d,3.0d));
testPoints.add(new PSTPoint(-3.0d,0.0d));
testPoints.add(new PSTPoint(-2.0d,4.0d));
testPoints.add(new PSTPoint(-1.0d,2.0d));
testPoints.add(new PSTPoint(4.0d,-1.0d));
testPoints.add(new PSTPoint(5.0d,-2.0d));
testPoints.add(new PSTPoint(6.0d,-3.0d));
testPoints.add(new PSTPoint(7.0d,22.0d));
testPoints.add(new PSTPoint(8.0d,42.0d));
testPoints.add(new PSTPoint(0.0d,-30.0d));
PrioritySearchTree pst = new PrioritySearchTree(testPoints);
// Test query
System.out.print("All points within 4 bounds: ");
printList(pst.findAllPointsWithin(-3.0d,-3.0d,3.0d,3.0d));
System.out.print("All points within 3 bounds: ");
printList(pst.findAllPointsWithin(-3.0d,3.0d,3.0d));
System.out.println("MinY: " + pst.minY());
System.out.println("MaxY: " + pst.maxY());
System.out.println("MinX: " + pst.minX());
System.out.println("MaxX: " + pst.maxX());
// Test with more data
testPoints = new ArrayList<PSTPoint>();
for(double i = 1.0d; i < 10000; i++) {
testPoints.add(new PSTPoint(i,i));
testPoints.add(new PSTPoint(-i,-i));
}
pst = new PrioritySearchTree(testPoints);
System.out.println("All points (larger data set) within 3 bounds: ");
printList(pst.findAllPointsWithin(-10.0d,-10.0d,10.0d,10.0d));
System.out.println("MinY: " + pst.minY());
System.out.println("MaxY: " + pst.maxY());
System.out.println("MinX: " + pst.minX());
System.out.println("MaxX: " + pst.maxX());
}
public class EmptyTreeException extends Exception {
public EmptyTreeException() {
super("Tree is empty");
}
}
}
|
package org.openhds.controller.service.impl;
import org.openhds.controller.service.SettingsService;
import org.openhds.domain.model.GeneralSettings;
import org.openhds.domain.service.SitePropertiesService;
public class SettingsServiceImpl implements SettingsService {
private SitePropertiesService siteProperties;
public SettingsServiceImpl(SitePropertiesService siteProperties){
this.siteProperties = siteProperties;
}
@Override
public GeneralSettings getSettings() {
GeneralSettings gs = new GeneralSettings();
int minimumAgeOfParents = siteProperties.getMinimumAgeOfParents();
int minimumAgeOfHouseholdHead = siteProperties.getMinimumAgeOfHouseholdHead();
int minMarriageAge = siteProperties.getMinimumAgeOfMarriage();
int minimumAgeOfPregnancy = siteProperties.getMinimumAgeOfPregnancy();
gs.setMinimumAgeOfParents(minimumAgeOfParents);
gs.setMinimumAgeOfHouseholdHead(minimumAgeOfHouseholdHead);
gs.setMinMarriageAge(minMarriageAge);
gs.setMinimumAgeOfPregnancy(minimumAgeOfPregnancy);
return gs;
}
}
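/*
 * Usage sketch (illustrative; the service is normally wired by a DI container and the
 * variable names below are assumptions):
 *
 *     SettingsService settingsService = new SettingsServiceImpl(siteProperties);
 *     GeneralSettings settings = settingsService.getSettings();
 */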
|
package controllers;
import static models.QUser.user;
import static models.QRole.role1;
import java.io.UnsupportedEncodingException;
import java.math.BigInteger;
import java.security.SecureRandom;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.inject.Inject;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.PasswordAuthentication;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import org.apache.commons.lang.RandomStringUtils;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import com.querydsl.core.Tuple;
import exceptions.GeoportaalBeheerException;
import nl.idgis.commons.utils.Mail;
import play.i18n.*;
import play.Play;
import play.data.Form;
import play.mvc.Controller;
import play.Routes;
import play.mvc.Result;
import util.QueryDSL;
import play.data.validation.Constraints;
/**
* Controller handling user login and password management
*
* @author Sandro
*
*/
public class User extends Controller {
@Inject QueryDSL q;
/**
* Renders the login page
*
* @param r the return url
* @return the {@link Result} of the login page
*/
public Result login(final String r) {
// Create a login form and fill in the saved fields
final Form<Login> loginForm = Form.form(Login.class).fill(new Login(r));
// Fetch the change password and forgot password messages; if either is null it defaults to an empty string
String cpMsg = session("changePassword");
String fpMsg = session("forgotPassword");
if(cpMsg == null) {
cpMsg = "";
}
if(fpMsg == null) {
fpMsg = "";
}
// Empties the change password and forgot password keys
session("changePassword", "");
session("forgotPassword", "");
// Returns the login page
return ok(views.html.login.render(loginForm, cpMsg, fpMsg));
}
/**
* Authentication of the user
*
* @return the {@link Result} of the authentication handling: either the login page with an error message, the previous request or the index page
*/
public Result authenticate() {
// Create a login form object from the submitted form
final Form<Login> loginForm = Form.form(Login.class).bindFromRequest();
// Call the validate method
validate(loginForm);
// Check if form has errors
if(loginForm.hasErrors()) {
return badRequest(views.html.login.render(loginForm, "", ""));
} else {
// Clear the session and set the username key to the logged in username
session().clear();
session("username", loginForm.get().username.trim());
// Redirect to the return URL if one was given, otherwise fall back to the index page
if(loginForm.get().getReturnUrl() != null) {
return redirect(loginForm.get().getReturnUrl());
} else {
return redirect(controllers.routes.Index.index("", "none", "none", "", "", "", "", "dateDesc", ""));
}
}
}
/**
* Checks if the given password belongs to the username
*
* @param loginForm the login form where the username and password is stored
*/
public void validate(Form<Login> loginForm) {
q.withTransaction(tx -> {
String username = loginForm.get().username.trim();
// Fetches the password that belongs to the username
String dbPassword = tx
.select(user.password)
.from(user)
.where(user.username.equalsIgnoreCase(username)
.and(user.archived.isFalse()))
.fetchOne();
// Create a BCrypt encoder
BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
// Reject if no password was found or if the given password does not match the stored one
if(dbPassword == null || !encoder.matches(loginForm.get().password, dbPassword)) {
loginForm.reject(Messages.get("login.error.message"));
}
});
}
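/*
 * Sketch of the BCrypt round trip relied on above (illustrative; "secret" is a made-up value):
 *
 *     BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
 *     String stored = encoder.encode("secret");        // salted hash, as persisted in user.password
 *     boolean ok = encoder.matches("secret", stored);  // true; matches() re-hashes using the stored salt
 */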
/**
* Log out of the admin
*
* @return the {@link Result} of the index page (because the user is logged out it essentially defaults back to the login page)
*/
public Result logout() {
// Clear all keys from the session
session().clear();
// Returns the internal portal page
return redirect(Play.application().configuration().getString("geoportaal.internal.url"));
}
/**
* Render the change password page
*
* @return the {@link Result} of the change password page
*/
public Result renderChangePassword() {
// Create the form object of change password
final Form<ChangePassword> cpForm = Form.form(ChangePassword.class);
// Returns the change password page
return ok(views.html.changepassword.render(cpForm));
}
/**
* Changing the password in the database if form is correctly filled out
*
* @return the {@link Result} of the login page with a message about the success of changing the password
*/
public Result changePassword() {
// Create a change password form object from the submitted form
final Form<ChangePassword> cpForm = Form.form(ChangePassword.class).bindFromRequest();
// Reject and return with a message if one of the input fields is empty
if("".equals(cpForm.get().username.trim()) || "".equals(cpForm.get().oldPassword.trim()) || "".equals(cpForm.get().newPassword.trim()) ||
"".equals(cpForm.get().repeatNewPassword.trim())) {
cpForm.reject(Messages.get("password.edit.error.incomplete.message"));
}
if(cpForm.hasErrors()) {
return badRequest(views.html.changepassword.render(cpForm));
}
q.withTransaction(tx -> {
// Fetch the password that belongs to the given username
String dbPassword = tx
.select(user.password)
.from(user)
.where(user.username.eq(cpForm.get().username.trim()))
.fetchOne();
// Reject and return with a message if the username isn't known or if the username and old password don't match
BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
if(dbPassword == null || !encoder.matches(cpForm.get().oldPassword, dbPassword)) {
cpForm.reject(Messages.get("password.edit.error.mismatch.message"));
}
});
if(cpForm.hasErrors()) {
return badRequest(views.html.changepassword.render(cpForm));
}
// Reject and return with a message if the new password and the repeating of the new password don't match
if(!cpForm.get().newPassword.equals(cpForm.get().repeatNewPassword)) {
cpForm.reject(Messages.get("password.edit.error.repeat.message"));
}
if(cpForm.hasErrors()) {
return badRequest(views.html.changepassword.render(cpForm));
}
q.withTransaction(tx -> {
// Encode the new password and update password in the database
BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
String encodedNP = encoder.encode(cpForm.get().newPassword);
tx.update(user)
.set(user.password, encodedNP)
.where(user.username.eq(cpForm.get().username.trim()))
.execute();
});
// Set the changepassword key in the session for displaying success message
session("changePassword", Messages.get("password.edit.success"));
// Return the index page, which defaults to login page because the user is not logged in
return redirect(controllers.routes.Index.index("", "none", "none", "", "", "", "", "dateDesc", ""));
}
/**
* Render the forgot password page
*
* @return the {@link Result} of the forgot password page
*/
public Result renderForgotPassword() {
// Create the form object of forgot password
final Form<ForgotPassword> fpForm = Form.form(ForgotPassword.class);
// Return the forgot password page
return ok(views.html.forgotpassword.render(fpForm));
}
/**
* Sending an e-mail with a new password if the given username is known
*
* @return the {@link Result} of the login page with a message about the success of sending the e-mail
*/
public Result forgotPassword() {
// Create a forgot password form object from the submitted form
final Form<ForgotPassword> fpForm = Form.form(ForgotPassword.class).bindFromRequest();
// Fetches the username and password of the e-mail client
final String emailUsername = Play.application().configuration().getString("geoportaal.email.username");
final String emailPassword = Play.application().configuration().getString("geoportaal.email.password");
// Generates a String and an encoded version of that string
String password = RandomStringUtils.randomAlphanumeric(10);
BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
String encodedPW = encoder.encode(password);
q.withTransaction(tx -> {
// Updates password in the database
Long count = tx.update(user)
.set(user.password, encodedPW)
.where(user.username.eq(fpForm.get().username.trim()))
.execute();
Integer finalCount = count.intValue();
if(finalCount.equals(1)) {
// Create a hashmap with a password key and value
Map<String, Object> placeholders = new HashMap<String, Object>();
placeholders.put("password", password);
// Format the message of the e-mail
String msg = Mail.createMsg(placeholders, Messages.get("password.forgot.email.message", "${password}"));
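// Illustrative (the template text is an assumption): a localized template such as
// "Your new password is ${password}" would come back from createMsg with "${password}"
// replaced by the generated value, e.g. "Your new password is aB3xK9QpRz".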
try {
// Send the e-mail
Mail.send(emailUsername, emailPassword, "mail.your-server.de", 25, fpForm.get().username.trim(), emailUsername,
Messages.get("password.forgot.email.subject"), msg);
} catch(Exception e) {
throw e;
}
}
// Throw exception if the count of the affected rows is more than 1
if(finalCount > 1) {
throw new GeoportaalBeheerException("Resetting password: too many rows affected");
}
});
// Set the forgotpassword key in the session for displaying success message
session("forgotPassword", Messages.get("password.forgot.success"));
// Return the index page, which defaults to login page because the user is not logged in
return redirect(controllers.routes.Index.index("", "none", "none", "", "", "", "", "dateDesc", ""));
}
public Result loginHelp() {
return ok(views.html.loginhelp.render());
}
/**
* The login form
*
* @author Sandro
*
*/
public static class Login {
private String username;
private String password;
private String returnUrl;
public Login() {
}
public Login(final String returnUrl) {
this.returnUrl = returnUrl;
}
public String getUsername() {
return username;
}
public void setUsername(final String username) {
this.username = username;
}
public String getPassword() {
return password;
}
public void setPassword(final String password) {
this.password = password;
}
public String getReturnUrl() {
return returnUrl;
}
public void setReturnUrl(final String returnUrl) {
this.returnUrl = returnUrl;
}
}
/**
* The change password form
*
* @author Sandro
*
*/
public static class ChangePassword {
private String username;
private String oldPassword;
private String newPassword;
private String repeatNewPassword;
public ChangePassword() {
}
public String getUsername() {
return username;
}
public void setUsername(final String username) {
this.username = username;
}
public String getOldPassword() {
return oldPassword;
}
public void setOldPassword(final String oldPassword) {
this.oldPassword = oldPassword;
}
public String getNewPassword() {
return newPassword;
}
public void setNewPassword(final String newPassword) {
this.newPassword = newPassword;
}
public String getRepeatNewPassword() {
return repeatNewPassword;
}
public void setRepeatNewPassword(final String repeatNewPassword) {
this.repeatNewPassword = repeatNewPassword;
}
}
/**
* The forgot password form
*
* @author Sandro
*
*/
public static class ForgotPassword {
private String username;
public ForgotPassword() {
}
public String getUsername() {
return username;
}
public void setUsername(final String username) {
this.username = username;
}
}
}
|
package org.andengine.engine;
import org.andengine.audio.music.MusicFactory;
import org.andengine.audio.music.MusicManager;
import org.andengine.audio.sound.SoundFactory;
import org.andengine.audio.sound.SoundManager;
import org.andengine.engine.camera.Camera;
import org.andengine.engine.handler.DrawHandlerList;
import org.andengine.engine.handler.IDrawHandler;
import org.andengine.engine.handler.IUpdateHandler;
import org.andengine.engine.handler.UpdateHandlerList;
import org.andengine.engine.handler.runnable.RunnableHandler;
import org.andengine.engine.options.EngineOptions;
import org.andengine.entity.scene.Scene;
import org.andengine.input.touch.TouchEvent;
import org.andengine.input.touch.controller.ITouchController;
import org.andengine.input.touch.controller.ITouchController.ITouchEventCallback;
import org.andengine.input.touch.controller.MultiTouchController;
import org.andengine.input.touch.controller.SingleTouchController;
import org.andengine.opengl.font.FontFactory;
import org.andengine.opengl.font.FontManager;
import org.andengine.opengl.shader.ShaderProgramManager;
import org.andengine.opengl.texture.TextureManager;
import org.andengine.opengl.texture.atlas.bitmap.BitmapTextureAtlasTextureRegionFactory;
import org.andengine.opengl.util.GLState;
import org.andengine.opengl.vbo.VertexBufferObjectManager;
import org.andengine.sensor.SensorDelay;
import org.andengine.sensor.accelerometer.AccelerometerData;
import org.andengine.sensor.accelerometer.AccelerometerSensorOptions;
import org.andengine.sensor.accelerometer.IAccelerometerListener;
import org.andengine.sensor.location.ILocationListener;
import org.andengine.sensor.location.LocationProviderStatus;
import org.andengine.sensor.location.LocationSensorOptions;
import org.andengine.sensor.orientation.IOrientationListener;
import org.andengine.sensor.orientation.OrientationData;
import org.andengine.sensor.orientation.OrientationSensorOptions;
import org.andengine.util.constants.TimeConstants;
import org.andengine.util.debug.Debug;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationProvider;
import android.os.Bundle;
import android.os.Vibrator;
import android.view.Display;
import android.view.MotionEvent;
import android.view.View;
import android.view.View.OnTouchListener;
import android.view.WindowManager;
public class Engine implements SensorEventListener, OnTouchListener, ITouchEventCallback, TimeConstants, LocationListener {
// Constants
private static final SensorDelay SENSORDELAY_DEFAULT = SensorDelay.GAME;
private static final int UPDATEHANDLERS_CAPACITY_DEFAULT = 8;
private static final int DRAWHANDLERS_CAPACITY_DEFAULT = 4;
// Fields
private boolean mRunning = false;
private long mLastTick = -1;
private float mSecondsElapsedTotal = 0;
private final EngineLock mEngineLock = new EngineLock();
private final UpdateThread mUpdateThread = new UpdateThread();
private final RunnableHandler mUpdateThreadRunnableHandler = new RunnableHandler();
private final EngineOptions mEngineOptions;
protected final Camera mCamera;
private ITouchController mTouchController;
private final TextureManager mTextureManager = new TextureManager();
private final FontManager mFontManager = new FontManager();
private final ShaderProgramManager mShaderProgramManager = new ShaderProgramManager();
private SoundManager mSoundManager;
private MusicManager mMusicManager;
protected Scene mScene;
private Vibrator mVibrator;
private ILocationListener mLocationListener;
private Location mLocation;
private IAccelerometerListener mAccelerometerListener;
private AccelerometerData mAccelerometerData;
private IOrientationListener mOrientationListener;
private OrientationData mOrientationData;
private final UpdateHandlerList mUpdateHandlers = new UpdateHandlerList(Engine.UPDATEHANDLERS_CAPACITY_DEFAULT);
private final DrawHandlerList mDrawHandlers = new DrawHandlerList(Engine.DRAWHANDLERS_CAPACITY_DEFAULT);
protected int mSurfaceWidth = 1; // 1 to prevent accidental DIV/0
protected int mSurfaceHeight = 1; // 1 to prevent accidental DIV/0
private boolean mIsMethodTracing;
// Constructors
public Engine(final EngineOptions pEngineOptions) {
BitmapTextureAtlasTextureRegionFactory.reset();
SoundFactory.onCreate();
MusicFactory.onCreate();
FontFactory.onCreate();
VertexBufferObjectManager.onCreate();
this.mTextureManager.onCreate();
this.mFontManager.onCreate();
this.mShaderProgramManager.onCreate();
this.mEngineOptions = pEngineOptions;
this.mCamera = pEngineOptions.getCamera();
if(this.mEngineOptions.getTouchOptions().needsMultiTouch()) {
this.setTouchController(new MultiTouchController());
} else {
this.setTouchController(new SingleTouchController());
}
if(this.mEngineOptions.getAudioOptions().needsSound()) {
this.mSoundManager = new SoundManager();
}
if(this.mEngineOptions.getAudioOptions().needsMusic()) {
this.mMusicManager = new MusicManager();
}
this.mUpdateThread.start();
}
// Getter & Setter
public synchronized boolean isRunning() {
return this.mRunning;
}
public synchronized void start() {
if(!this.mRunning) {
this.mLastTick = System.nanoTime();
this.mRunning = true;
}
}
public synchronized void stop() {
if(this.mRunning) {
this.mRunning = false;
}
}
public EngineLock getEngineLock() {
return this.mEngineLock;
}
public Scene getScene() {
return this.mScene;
}
public void setScene(final Scene pScene) {
this.mScene = pScene;
}
public EngineOptions getEngineOptions() {
return this.mEngineOptions;
}
public Camera getCamera() {
return this.mCamera;
}
public float getSecondsElapsedTotal() {
return this.mSecondsElapsedTotal;
}
public void setSurfaceSize(final int pSurfaceWidth, final int pSurfaceHeight) {
this.mSurfaceWidth = pSurfaceWidth;
this.mSurfaceHeight = pSurfaceHeight;
this.onUpdateCameraSurface();
}
protected void onUpdateCameraSurface() {
this.mCamera.setSurfaceSize(0, 0, this.mSurfaceWidth, this.mSurfaceHeight);
}
public int getSurfaceWidth() {
return this.mSurfaceWidth;
}
public int getSurfaceHeight() {
return this.mSurfaceHeight;
}
public ITouchController getTouchController() {
return this.mTouchController;
}
public void setTouchController(final ITouchController pTouchController) {
this.mTouchController = pTouchController;
this.mTouchController.setTouchEventCallback(this);
}
public AccelerometerData getAccelerometerData() {
return this.mAccelerometerData;
}
public OrientationData getOrientationData() {
return this.mOrientationData;
}
public TextureManager getTextureManager() {
return this.mTextureManager;
}
public FontManager getFontManager() {
return this.mFontManager;
}
public ShaderProgramManager getShaderProgramManager() {
return this.mShaderProgramManager;
}
public SoundManager getSoundManager() throws IllegalStateException {
if(this.mSoundManager != null) {
return this.mSoundManager;
} else {
throw new IllegalStateException("To enable the SoundManager, check the EngineOptions!");
}
}
public MusicManager getMusicManager() throws IllegalStateException {
if(this.mMusicManager != null) {
return this.mMusicManager;
} else {
throw new IllegalStateException("To enable the MusicManager, check the EngineOptions!");
}
}
public void registerUpdateHandler(final IUpdateHandler pUpdateHandler) {
this.mUpdateHandlers.add(pUpdateHandler);
}
public void unregisterUpdateHandler(final IUpdateHandler pUpdateHandler) {
this.mUpdateHandlers.remove(pUpdateHandler);
}
public void clearUpdateHandlers() {
this.mUpdateHandlers.clear();
}
public void registerDrawHandler(final IDrawHandler pDrawHandler) {
this.mDrawHandlers.add(pDrawHandler);
}
public void unregisterDrawHandler(final IDrawHandler pDrawHandler) {
this.mDrawHandlers.remove(pDrawHandler);
}
public void clearDrawHandlers() {
this.mDrawHandlers.clear();
}
public boolean isMethodTracing() {
return this.mIsMethodTracing;
}
public void startMethodTracing(final String pTraceFileName) {
if(!this.mIsMethodTracing) {
this.mIsMethodTracing = true;
android.os.Debug.startMethodTracing(pTraceFileName);
}
}
public void stopMethodTracing() {
if(this.mIsMethodTracing) {
android.os.Debug.stopMethodTracing();
this.mIsMethodTracing = false;
}
}
// Methods for/from SuperClass/Interfaces
@Override
public void onAccuracyChanged(final Sensor pSensor, final int pAccuracy) {
if(this.mRunning) {
switch(pSensor.getType()) {
case Sensor.TYPE_ACCELEROMETER:
if(this.mAccelerometerData != null) {
this.mAccelerometerData.setAccuracy(pAccuracy);
this.mAccelerometerListener.onAccelerometerChanged(this.mAccelerometerData);
} else if(this.mOrientationData != null) {
this.mOrientationData.setAccelerometerAccuracy(pAccuracy);
this.mOrientationListener.onOrientationChanged(this.mOrientationData);
}
break;
case Sensor.TYPE_MAGNETIC_FIELD:
this.mOrientationData.setMagneticFieldAccuracy(pAccuracy);
this.mOrientationListener.onOrientationChanged(this.mOrientationData);
break;
}
}
}
@Override
public void onSensorChanged(final SensorEvent pEvent) {
if(this.mRunning) {
switch(pEvent.sensor.getType()) {
case Sensor.TYPE_ACCELEROMETER:
if(this.mAccelerometerData != null) {
this.mAccelerometerData.setValues(pEvent.values);
this.mAccelerometerListener.onAccelerometerChanged(this.mAccelerometerData);
} else if(this.mOrientationData != null) {
this.mOrientationData.setAccelerometerValues(pEvent.values);
this.mOrientationListener.onOrientationChanged(this.mOrientationData);
}
break;
case Sensor.TYPE_MAGNETIC_FIELD:
this.mOrientationData.setMagneticFieldValues(pEvent.values);
this.mOrientationListener.onOrientationChanged(this.mOrientationData);
break;
}
}
}
@Override
public void onLocationChanged(final Location pLocation) {
if(this.mLocation == null) {
this.mLocation = pLocation;
} else {
if(pLocation == null) {
this.mLocationListener.onLocationLost();
} else {
this.mLocation = pLocation;
this.mLocationListener.onLocationChanged(pLocation);
}
}
}
@Override
public void onProviderDisabled(final String pProvider) {
this.mLocationListener.onLocationProviderDisabled();
}
@Override
public void onProviderEnabled(final String pProvider) {
this.mLocationListener.onLocationProviderEnabled();
}
@Override
public void onStatusChanged(final String pProvider, final int pStatus, final Bundle pExtras) {
switch(pStatus) {
case LocationProvider.AVAILABLE:
this.mLocationListener.onLocationProviderStatusChanged(LocationProviderStatus.AVAILABLE, pExtras);
break;
case LocationProvider.OUT_OF_SERVICE:
this.mLocationListener.onLocationProviderStatusChanged(LocationProviderStatus.OUT_OF_SERVICE, pExtras);
break;
case LocationProvider.TEMPORARILY_UNAVAILABLE:
this.mLocationListener.onLocationProviderStatusChanged(LocationProviderStatus.TEMPORARILY_UNAVAILABLE, pExtras);
break;
}
}
@Override
public boolean onTouch(final View pView, final MotionEvent pSurfaceMotionEvent) {
if(this.mRunning) {
this.mTouchController.onHandleMotionEvent(pSurfaceMotionEvent);
try {
/*
* As a human cannot interact 1000x per second, we pause the
* UI-Thread for a little.
*/
Thread.sleep(20); // TODO Maybe this can be removed, when TouchEvents are handled on the UpdateThread!
} catch (final InterruptedException e) {
Debug.e(e);
}
return true;
} else {
return false;
}
}
@Override
public boolean onTouchEvent(final TouchEvent pSurfaceTouchEvent) {
/*
* Let the engine determine which scene and camera this event should be
* handled by.
*/
final Scene scene = this.getSceneFromSurfaceTouchEvent(pSurfaceTouchEvent);
final Camera camera = this.getCameraFromSurfaceTouchEvent(pSurfaceTouchEvent);
this.convertSurfaceToSceneTouchEvent(camera, pSurfaceTouchEvent);
if(this.onTouchHUD(camera, pSurfaceTouchEvent)) {
return true;
} else {
/* If HUD didn't handle it, Scene may handle it. */
return this.onTouchScene(scene, pSurfaceTouchEvent);
}
}
protected boolean onTouchHUD(final Camera pCamera, final TouchEvent pSceneTouchEvent) {
if(pCamera.hasHUD()) {
return pCamera.getHUD().onSceneTouchEvent(pSceneTouchEvent);
} else {
return false;
}
}
protected boolean onTouchScene(final Scene pScene, final TouchEvent pSceneTouchEvent) {
if(pScene != null) {
return pScene.onSceneTouchEvent(pSceneTouchEvent);
} else {
return false;
}
}
// Methods
public void runOnUpdateThread(final Runnable pRunnable) {
this.mUpdateThreadRunnableHandler.postRunnable(pRunnable);
}
/**
* @param pRunnable the {@link Runnable} to run mutually exclusive to the {@link UpdateThread} and the GL-{@link Thread}.
* @see {@link Engine#getEngineLock()} to manually synchronize and avoid creating a {@link Runnable}.
*/
public void runSafely(final Runnable pRunnable) {
synchronized(this.mEngineLock) {
pRunnable.run();
}
}
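/*
 * A minimal usage sketch for runSafely (the scene/sprite names are hypothetical):
 *
 *   engine.runSafely(new Runnable() {
 *       @Override
 *       public void run() {
 *           scene.detachChild(sprite); // runs while neither the UpdateThread nor the GL thread holds the EngineLock
 *       }
 *   });
 *
 * Equivalently, synchronized(engine.getEngineLock()) { ... } gives the same mutual exclusion
 * without allocating a Runnable.
 */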
public void onDestroy() {
this.mTextureManager.onDestroy();
this.mFontManager.onDestroy();
this.mShaderProgramManager.onDestroy();
VertexBufferObjectManager.onDestroy();
this.mUpdateThread.interrupt();
}
public void onReloadResources() {
this.mTextureManager.onReload();
this.mFontManager.onReload();
this.mShaderProgramManager.onReload();
VertexBufferObjectManager.onReload();
}
protected Camera getCameraFromSurfaceTouchEvent(final TouchEvent pTouchEvent) {
return this.getCamera();
}
protected Scene getSceneFromSurfaceTouchEvent(final TouchEvent pTouchEvent) {
return this.mScene;
}
protected void convertSurfaceToSceneTouchEvent(final Camera pCamera, final TouchEvent pSurfaceTouchEvent) {
pCamera.convertSurfaceToSceneTouchEvent(pSurfaceTouchEvent, this.mSurfaceWidth, this.mSurfaceHeight);
}
void onTickUpdate() throws InterruptedException {
if(this.mRunning) {
final long nanosecondsElapsed = this.getNanosecondsElapsed();
synchronized(this.mEngineLock) {
this.onUpdate(nanosecondsElapsed);
this.mEngineLock.notifyCanDraw();
this.mEngineLock.waitUntilCanUpdate();
}
} else {
synchronized(this.mEngineLock) {
this.mEngineLock.notifyCanDraw();
this.mEngineLock.waitUntilCanUpdate();
}
Thread.sleep(16);
}
}
public void onUpdate(final long pNanosecondsElapsed) throws InterruptedException {
final float pSecondsElapsed = pNanosecondsElapsed * TimeConstants.SECONDS_PER_NANOSECOND;
this.mSecondsElapsedTotal += pSecondsElapsed;
this.mLastTick += pNanosecondsElapsed;
this.mTouchController.onUpdate(pSecondsElapsed);
this.onUpdateUpdateHandlers(pSecondsElapsed);
this.onUpdateScene(pSecondsElapsed);
}
protected void onUpdateScene(final float pSecondsElapsed) {
if(this.mScene != null) {
this.mScene.onUpdate(pSecondsElapsed);
}
}
protected void onUpdateUpdateHandlers(final float pSecondsElapsed) {
this.mUpdateThreadRunnableHandler.onUpdate(pSecondsElapsed);
this.mUpdateHandlers.onUpdate(pSecondsElapsed);
this.getCamera().onUpdate(pSecondsElapsed);
}
protected void onUpdateDrawHandlers(final GLState pGLState, final Camera pCamera) {
this.mDrawHandlers.onDraw(pGLState, pCamera);
}
public void onDrawFrame(final GLState pGLState) throws InterruptedException {
final EngineLock engineLock = this.mEngineLock;
synchronized(engineLock) {
engineLock.waitUntilCanDraw();
this.mTextureManager.updateTextures(pGLState);
this.mFontManager.updateFonts(pGLState);
VertexBufferObjectManager.updateBufferObjects(pGLState);
this.onUpdateDrawHandlers(pGLState, this.mCamera);
this.onDrawScene(pGLState, this.mCamera);
engineLock.notifyCanUpdate();
}
}
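/*
 * Informal reading of the update/draw hand-shake implemented by onTickUpdate and onDrawFrame:
 *
 *   UpdateThread: onTickUpdate() -> onUpdate(dt) -> notifyCanDraw() -> waitUntilCanUpdate()
 *   GL thread:    onDrawFrame()  -> waitUntilCanDraw() -> update textures/fonts/VBOs, draw scene -> notifyCanUpdate()
 *
 * The two threads therefore alternate strictly, with EngineLock.mDrawing acting as the turn flag;
 * while the engine is stopped, the UpdateThread still releases the GL thread roughly every 16ms.
 */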
protected void onDrawScene(final GLState pGLState, final Camera pCamera) {
if(this.mScene != null) {
this.mScene.onDraw(pGLState, pCamera);
}
pCamera.onDrawHUD(pGLState);
}
private long getNanosecondsElapsed() {
final long now = System.nanoTime();
return now - this.mLastTick;
}
public boolean enableVibrator(final Context pContext) {
this.mVibrator = (Vibrator) pContext.getSystemService(Context.VIBRATOR_SERVICE);
return this.mVibrator != null;
}
public void vibrate(final long pMilliseconds) throws IllegalStateException {
if(this.mVibrator != null) {
this.mVibrator.vibrate(pMilliseconds);
} else {
throw new IllegalStateException("You need to enable the Vibrator before you can use it!");
}
}
public void vibrate(final long[] pPattern, final int pRepeat) throws IllegalStateException {
if(this.mVibrator != null) {
this.mVibrator.vibrate(pPattern, pRepeat);
} else {
throw new IllegalStateException("You need to enable the Vibrator before you can use it!");
}
}
public void enableLocationSensor(final Context pContext, final ILocationListener pLocationListener, final LocationSensorOptions pLocationSensorOptions) {
this.mLocationListener = pLocationListener;
final LocationManager locationManager = (LocationManager) pContext.getSystemService(Context.LOCATION_SERVICE);
final String locationProvider = locationManager.getBestProvider(pLocationSensorOptions, pLocationSensorOptions.isEnabledOnly());
// TODO locationProvider can be null, in that case return false. Successful case should return true.
locationManager.requestLocationUpdates(locationProvider, pLocationSensorOptions.getMinimumTriggerTime(), pLocationSensorOptions.getMinimumTriggerDistance(), this);
this.onLocationChanged(locationManager.getLastKnownLocation(locationProvider));
}
public void disableLocationSensor(final Context pContext) {
final LocationManager locationManager = (LocationManager) pContext.getSystemService(Context.LOCATION_SERVICE);
locationManager.removeUpdates(this);
}
/**
* @see {@link Engine#enableAccelerometerSensor(Context, IAccelerometerListener, AccelerometerSensorOptions)}
*/
public boolean enableAccelerometerSensor(final Context pContext, final IAccelerometerListener pAccelerometerListener) {
return this.enableAccelerometerSensor(pContext, pAccelerometerListener, new AccelerometerSensorOptions(Engine.SENSORDELAY_DEFAULT));
}
/**
* @return <code>true</code> when the sensor was successfully enabled, <code>false</code> otherwise.
*/
public boolean enableAccelerometerSensor(final Context pContext, final IAccelerometerListener pAccelerometerListener, final AccelerometerSensorOptions pAccelerometerSensorOptions) {
final SensorManager sensorManager = (SensorManager) pContext.getSystemService(Context.SENSOR_SERVICE);
if(this.isSensorSupported(sensorManager, Sensor.TYPE_ACCELEROMETER)) {
this.mAccelerometerListener = pAccelerometerListener;
if(this.mAccelerometerData == null) {
final Display display = ((WindowManager) pContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
final int displayRotation = display.getOrientation();
this.mAccelerometerData = new AccelerometerData(displayRotation);
}
this.registerSelfAsSensorListener(sensorManager, Sensor.TYPE_ACCELEROMETER, pAccelerometerSensorOptions.getSensorDelay());
return true;
} else {
return false;
}
}
/**
* @return <code>true</code> when the sensor was successfully disabled, <code>false</code> otherwise.
*/
public boolean disableAccelerometerSensor(final Context pContext) {
final SensorManager sensorManager = (SensorManager) pContext.getSystemService(Context.SENSOR_SERVICE);
if(this.isSensorSupported(sensorManager, Sensor.TYPE_ACCELEROMETER)) {
this.unregisterSelfAsSensorListener(sensorManager, Sensor.TYPE_ACCELEROMETER);
return true;
} else {
return false;
}
}
/**
* @see {@link Engine#enableOrientationSensor(Context, IOrientationListener, OrientationSensorOptions)}
*/
public boolean enableOrientationSensor(final Context pContext, final IOrientationListener pOrientationListener) {
return this.enableOrientationSensor(pContext, pOrientationListener, new OrientationSensorOptions(Engine.SENSORDELAY_DEFAULT));
}
/**
* @return <code>true</code> when the sensor was successfully enabled, <code>false</code> otherwise.
*/
public boolean enableOrientationSensor(final Context pContext, final IOrientationListener pOrientationListener, final OrientationSensorOptions pOrientationSensorOptions) {
final SensorManager sensorManager = (SensorManager) pContext.getSystemService(Context.SENSOR_SERVICE);
if(this.isSensorSupported(sensorManager, Sensor.TYPE_ACCELEROMETER) && this.isSensorSupported(sensorManager, Sensor.TYPE_MAGNETIC_FIELD)) {
this.mOrientationListener = pOrientationListener;
if(this.mOrientationData == null) {
final Display display = ((WindowManager) pContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
final int displayRotation = display.getOrientation();
this.mOrientationData = new OrientationData(displayRotation);
}
this.registerSelfAsSensorListener(sensorManager, Sensor.TYPE_ACCELEROMETER, pOrientationSensorOptions.getSensorDelay());
this.registerSelfAsSensorListener(sensorManager, Sensor.TYPE_MAGNETIC_FIELD, pOrientationSensorOptions.getSensorDelay());
return true;
} else {
return false;
}
}
/**
* @return <code>true</code> when the sensor was successfully disabled, <code>false</code> otherwise.
*/
public boolean disableOrientationSensor(final Context pContext) {
final SensorManager sensorManager = (SensorManager) pContext.getSystemService(Context.SENSOR_SERVICE);
if(this.isSensorSupported(sensorManager, Sensor.TYPE_ACCELEROMETER) && this.isSensorSupported(sensorManager, Sensor.TYPE_MAGNETIC_FIELD)) {
this.unregisterSelfAsSensorListener(sensorManager, Sensor.TYPE_ACCELEROMETER);
this.unregisterSelfAsSensorListener(sensorManager, Sensor.TYPE_MAGNETIC_FIELD);
return true;
} else {
return false;
}
}
private boolean isSensorSupported(final SensorManager pSensorManager, final int pType) {
return pSensorManager.getSensorList(pType).size() > 0;
}
private void registerSelfAsSensorListener(final SensorManager pSensorManager, final int pType, final SensorDelay pSensorDelay) {
final Sensor sensor = pSensorManager.getSensorList(pType).get(0);
pSensorManager.registerListener(this, sensor, pSensorDelay.getDelay());
}
private void unregisterSelfAsSensorListener(final SensorManager pSensorManager, final int pType) {
final Sensor sensor = pSensorManager.getSensorList(pType).get(0);
pSensorManager.unregisterListener(this, sensor);
}
// Inner and Anonymous Classes
private class UpdateThread extends Thread {
// Constants
// Fields
// Constructors
public UpdateThread() {
super(UpdateThread.class.getSimpleName());
}
// Getter & Setter
// Methods for/from SuperClass/Interfaces
@Override
public void run() {
android.os.Process.setThreadPriority(Engine.this.mEngineOptions.getUpdateThreadPriority());
try {
while(true) {
Engine.this.onTickUpdate();
}
} catch (final InterruptedException e) {
Debug.d(this.getClass().getSimpleName() + " interrupted. Don't worry - this " + e.getClass().getSimpleName() + " is most likely expected!", e);
this.interrupt();
}
}
// Methods
// Inner and Anonymous Classes
}
public static class EngineLock {
// Constants
// Fields
boolean mDrawing = false;
// Constructors
// Getter & Setter
// Methods for/from SuperClass/Interfaces
// Methods
void notifyCanDraw() {
this.mDrawing = true;
this.notifyAll();
}
void notifyCanUpdate() {
this.mDrawing = false;
this.notifyAll();
}
void waitUntilCanDraw() throws InterruptedException {
while(!this.mDrawing) {
this.wait();
}
}
void waitUntilCanUpdate() throws InterruptedException {
while(this.mDrawing) {
this.wait();
}
}
// Inner and Anonymous Classes
}
}
|
package org.jivesoftware.sparkimpl.certificates;
import java.awt.HeadlessException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.util.Enumeration;
import java.util.LinkedList;
import java.util.List;
import javax.naming.InvalidNameException;
import javax.swing.JOptionPane;
import javax.swing.JTable;
import javax.swing.SwingUtilities;
import javax.swing.table.DefaultTableModel;
import javax.swing.table.TableColumnModel;
import org.jivesoftware.Spark;
import org.jivesoftware.resource.Res;
import org.jivesoftware.spark.ui.login.CertificateDialog;
import org.jivesoftware.spark.ui.login.CertificatesManagerSettingsPanel;
import org.jivesoftware.spark.util.log.Log;
import org.jivesoftware.sparkimpl.settings.local.LocalPreferences;
public class CertificateController extends CertManager {
/**
* There are 7 KeyStores:
* TRUSTED contains the user's trusted certificates
* EXCEPTIONS contains the user's certificates that are added to the exceptions list (their validity isn't checked)
* CACERTS contains only the JRE default certificates; data is only read from it and never saved to this file
* BLACKLIST used for revoked certificates, part of the super class CertManager
* DISTRUSTED_CACERTS when the user removes a JRE certificate, a copy of it is actually created in this KeyStore
* CACERTS_EXCEPTIONS used for JRE certificates that are added to the exceptions list (their validity isn't checked)
* DISPLAYED_CACERTS isn't really used as a file, as it is never saved, but this object helps in keystore management.
* It contains CACERTS - (DISTRUSTED_CACERTS + CACERTS_EXCEPTIONS)
*/
public final static File TRUSTED = new File(Spark.getSparkUserHome() + File.separator + "security" + File.separator + "truststore");
public final static File EXCEPTIONS = new File(Spark.getSparkUserHome() + File.separator + "security" + File.separator + "exceptions");
public final static File DISTRUSTED_CACERTS = new File(Spark.getSparkUserHome() + File.separator + "security" + File.separator + "distrusted_cacerts");
public final static File CACERTS_EXCEPTIONS = new File(Spark.getSparkUserHome() + File.separator + "security" + File.separator + "cacerts_exceptions");
public final static File DISPLAYED_CACERTS = new File(Spark.getSparkUserHome() + File.separator + "security" + File.separator + "displayed_cacerts");
// CACERTS should be used only for reading
public final static File CACERTS = new File(System.getProperty("java.home") + File.separator + "lib"
+ File.separator + "security" + File.separator + "cacerts");
private KeyStore trustStore, exceptionsStore, displayCaStore, distrustedCaStore, exceptionsCaStore;
private List<CertificateModel> trustedCertificates = new LinkedList<>(); // contains certificates which aren't revoked or exempted
private List<CertificateModel> exemptedCertificates = new LinkedList<>(); // contains only certificates from the exceptions list
private List<CertificateModel> exemptedCacerts = new LinkedList<>(); // contains only exempted cacerts certificates
private List<CertificateModel> displayCaCertificates = new LinkedList<>(); // contains displayed cacerts certificates that aren't exempted
private static final String[] COLUMN_NAMES = { Res.getString("table.column.certificate.subject"),
Res.getString("table.column.certificate.validity"), Res.getString("table.column.certificate.exempted") };
private static final int NUMBER_OF_COLUMNS = COLUMN_NAMES.length;
public CertificateController(LocalPreferences localPreferences) {
if (localPreferences == null) {
throw new IllegalArgumentException("localPreferences cannot be null");
}
this.localPreferences = localPreferences;
}
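/*
 * A minimal usage sketch, assuming a LocalPreferences instance named "preferences":
 *
 *   CertificateController controller = new CertificateController(preferences);
 *   controller.loadKeyStores();        // read the KeyStore files listed above
 *   controller.createCertTableModel(); // build the Swing table model from the loaded certificates
 *   controller.overWriteKeyStores();   // persist any later changes back to the KeyStore files
 */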
/**
* Load KeyStores files.
*/
@Override
public void loadKeyStores() {
blackListStore = openKeyStore(BLACKLIST);
trustStore = openKeyStore(TRUSTED);
exceptionsStore = openKeyStore(EXCEPTIONS);
distrustedCaStore = openKeyStore(DISTRUSTED_CACERTS);
exceptionsCaStore = openKeyStore(CACERTS_EXCEPTIONS);
displayCaStore = openCacertsKeyStore();
trustedCertificates = fillTableListWithKeyStoreContent(trustStore, trustedCertificates);
exemptedCertificates = fillTableListWithKeyStoreContent(exceptionsStore, exemptedCertificates);
displayCaCertificates = fillTableListWithKeyStoreContent(displayCaStore, displayCaCertificates);
exemptedCacerts = fillTableListWithKeyStoreContent(exceptionsCaStore, exemptedCacerts);
}
public KeyStore openCacertsKeyStore() {
KeyStore caStore = openKeyStore(CACERTS);
KeyStore distrustedCaStore = openKeyStore(DISTRUSTED_CACERTS);
KeyStore exceptionsCaStore = openKeyStore(CACERTS_EXCEPTIONS);
KeyStore displayCerts = null; // displayCerts keyStore is meant to contain certificates that are in cacerts and aren't distrusted
try {
displayCerts = KeyStore.getInstance("JKS");
displayCerts.load(null, passwd);
if (caStore != null) {
Enumeration<String> store;
store = caStore.aliases();
while (store.hasMoreElements()) {
String alias = (String) store.nextElement();
X509Certificate certificate = (X509Certificate) caStore.getCertificate(alias);
// if getCertificateAlias returns null then the entry doesn't exist in distrustedCaStore (Java's default).
if (distrustedCaStore.getCertificateAlias(certificate) == null && exceptionsCaStore.getCertificateAlias(certificate) == null) {
displayCerts.setCertificateEntry(alias, certificate);
}
}
}
} catch (KeyStoreException | NoSuchAlgorithmException | CertificateException | IOException e) {
Log.error("Cannot read KeyStore", e);
}
return displayCerts;
}
@Override
public void overWriteKeyStores() {
saveKeyStore(trustStore, TRUSTED);
saveKeyStore(exceptionsStore, EXCEPTIONS);
saveKeyStore(blackListStore, BLACKLIST);
saveKeyStore(distrustedCaStore, DISTRUSTED_CACERTS);
saveKeyStore(exceptionsCaStore, CACERTS_EXCEPTIONS);
}
public void createCertTableModel(){
tableModel = new DefaultTableModel() {
// return appropriate classes for the columns so that the last column is Boolean
// and is displayed as a checkbox
public Class<?> getColumnClass(int column) {
switch (column) {
case 0:
return String.class;
case 1:
return String.class;
case 2:
return Boolean.class;
default:
throw new RuntimeException("Cannot assign classes for columns");
}
}
@Override
public boolean isCellEditable(int row, int column) {
return column == 2;
}
};
tableModel.setColumnIdentifiers(COLUMN_NAMES);
Object[] certEntry = new Object[NUMBER_OF_COLUMNS];
addRowsToTableModel(trustedCertificates, certEntry);
addRowsToTableModel(exemptedCertificates, certEntry);
addRowsToTableModel(displayCaCertificates, certEntry);
addRowsToTableModel(exemptedCacerts, certEntry);
}
/**
* Adds a list to the certificate table so it is displayed in the table.
*
* @param certList list with CertificateModel objects that are added to the table
* @param certEntry serves as the table row model. Each element of that array corresponds to a column in the table
*/
private void addRowsToTableModel(List<CertificateModel> certList, Object[] certEntry){
if (certList != null) {
// put certificate from arrayList into rows with chosen columns
for (CertificateModel cert : certList) {
tableModel.addRow(fillTableWithList(certEntry, cert));
}
}
}
/**
* Creates a certificate entry which can be added as a row of the certificate table.
*
* @param certEntry serves as the table row model. Each element of that array corresponds to a column in the table
* @param cert the CertificateModel for which this method returns the object representing the table's row
* @return certificate entry, an array of objects whose values depend on this method. Elements are: [0] String [1] String [2] boolean
*/
private Object[] fillTableWithList(Object[] certEntry, CertificateModel cert) {
if (cert.getSubjectCommonName() != null) {
certEntry[0] = cert.getSubjectCommonName();
} else {
certEntry[0] = cert.getSubject();
}
certEntry[1] = cert.getValidityStatus();
certEntry[2] = isOnExceptionList(cert);
return certEntry;
}
/**
* If the argument is true then the certificate is moved to the exceptions KeyStore, if false then it is moved to the trusted KeyStore.
* Useful for checkboxes whose selected value indicates where the certificate should be moved.
* @param checked true to move the certificate to the exceptions KeyStore, false to move it back to the trusted KeyStore
*/
@Override
public void addOrRemoveFromExceptionList(boolean checked) {
int row = CertificatesManagerSettingsPanel.getCertTable().getSelectedRow();
String alias = allCertificates.get(row).getAlias();
if (getAliasKeyStorePath(alias).equals(TRUSTED) || getAliasKeyStorePath(alias).equals(EXCEPTIONS)) {
if (checked) {
try {
moveCertificate(TRUSTED, EXCEPTIONS);
} catch (KeyStoreException | NoSuchAlgorithmException | CertificateException | IOException ex) {
Log.error("Error at moving certificate from trusted list to the exception list", ex);
}
} else {
try {
moveCertificate(EXCEPTIONS, TRUSTED);
} catch (KeyStoreException | NoSuchAlgorithmException | CertificateException | IOException ex) {
Log.error("Error at moving certificate from exceptions list to trusted list", ex);
}
}
} else if (getAliasKeyStorePath(alias).equals(DISPLAYED_CACERTS)
|| getAliasKeyStorePath(alias).equals(CACERTS_EXCEPTIONS)) {
if (checked) {
try {
moveCertificate(DISPLAYED_CACERTS, CACERTS_EXCEPTIONS);
} catch (KeyStoreException | NoSuchAlgorithmException | CertificateException | IOException ex) {
Log.error("Error at moving certificate from trusted list to the exception list", ex);
}
} else {
try {
moveCertificate(CACERTS_EXCEPTIONS, DISPLAYED_CACERTS);
} catch (KeyStoreException | NoSuchAlgorithmException | CertificateException | IOException ex) {
Log.error("Error at moving certificate from exceptions list to trusted list", ex);
}
}
}
}
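/*
 * A hedged wiring sketch for the method above (hypothetical listener code, relying on the fact
 * that addOrRemoveFromExceptionList acts on the table's currently selected row). Column 2 is the
 * "exempted" checkbox column defined in COLUMN_NAMES:
 *
 *   tableModel.addTableModelListener(new javax.swing.event.TableModelListener() {
 *       public void tableChanged(javax.swing.event.TableModelEvent e) {
 *           if (e.getColumn() == 2) {
 *               boolean checked = (Boolean) tableModel.getValueAt(e.getFirstRow(), 2);
 *               controller.addOrRemoveFromExceptionList(checked);
 *           }
 *       }
 *   });
 */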
/**
* Returns whether the certificate is on the exceptions list.
*
* @param cert the CertificateModel entry
*/
@Override
public boolean isOnExceptionList(CertificateModel cert) {
if(exemptedCertificates.contains(cert)){
return true;
}else if (exemptedCacerts.contains(cert)){
return true;
}else {
return false;
}
}
/**
* Returns whether the certificate is on the blacklist (revoked).
*
* @param cert the CertificateModel entry
*/
public boolean isOnBlackList(CertificateModel cert) {
return blackListedCertificates.contains(cert);
}
/**
* Returns the KeyStore which contains the certificate with the given alias.
*
* @param alias alias of the certificate
* @return KeyStore containing the certificate, or null if no managed KeyStore contains it
*/
private KeyStore getAliasKeyStore(String alias) {
for (CertificateModel model : exemptedCertificates) {
if (model.getAlias().equals(alias)) {
return exceptionsStore;
}
}
for (CertificateModel model : blackListedCertificates) {
if (model.getAlias().equals(alias)) {
return blackListStore;
}
}
for (CertificateModel model : trustedCertificates) {
if (model.getAlias().equals(alias)) {
return trustStore;
}
}
for (CertificateModel model : displayCaCertificates) {
if (model.getAlias().equals(alias)) {
return displayCaStore;
}
}
for (CertificateModel model : exemptedCacerts) {
if (model.getAlias().equals(alias)) {
return exceptionsCaStore;
}
}
return null;
}
/**
* Returns the file path of the KeyStore which contains the certificate with the given alias.
*
* @param alias alias of the certificate
* @return File path of the KeyStore with the certificate, or null if no managed KeyStore contains it
*/
private File getAliasKeyStorePath(String alias) {
for (CertificateModel model : exemptedCertificates) {
if (model.getAlias().equals(alias)) {
return EXCEPTIONS;
}
}
for (CertificateModel model : blackListedCertificates) {
if (model.getAlias().equals(alias)) {
return BLACKLIST;
}
}
for (CertificateModel model : trustedCertificates) {
if (model.getAlias().equals(alias)) {
return TRUSTED;
}
}
for (CertificateModel model : displayCaCertificates) {
if (model.getAlias().equals(alias)) {
return DISPLAYED_CACERTS;
}
}
for (CertificateModel model : exemptedCacerts) {
if (model.getAlias().equals(alias)) {
return CACERTS_EXCEPTIONS;
}
}
return null;
}
/**
* This method deletes the certificate with the provided alias from the KeyStore that contains it.
*
* @param alias Alias of the certificate to delete
* @throws KeyStoreException
*/
@Override
public void deleteEntry(String alias) throws KeyStoreException {
int dialogButton = JOptionPane.YES_NO_OPTION;
int dialogValue = JOptionPane.showConfirmDialog(null, Res.getString("dialog.certificate.sure.to.delete"), null,
dialogButton);
if (dialogValue == JOptionPane.YES_OPTION) {
KeyStore store = getAliasKeyStore(alias);
if(store.equals(displayCaStore)){
// adds the entry to the distrusted store so it will not be displayed next time
distrustedCaStore.setCertificateEntry(alias, store.getCertificate(alias));
}
store.deleteEntry(alias);
JOptionPane.showMessageDialog(null, Res.getString("dialog.certificate.has.been.deleted"));
CertificateModel model = null;
for (CertificateModel certModel : allCertificates) {
if (certModel.getAlias().equals(alias)) {
model = certModel;
}
}
exemptedCertificates.remove(model);
trustedCertificates.remove(model);
blackListedCertificates.remove(model);
displayCaCertificates.remove(model);
allCertificates.remove(model);
}
refreshCertTable();
}
/**
* Refreshes the certificate table to make changes in its model visible.
*/
@Override
public void refreshCertTable() {
createCertTableModel();
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
resizeColumnWidth(CertificatesManagerSettingsPanel.getCertTable());
CertificatesManagerSettingsPanel.getCertTable().setModel(tableModel);
tableModel.fireTableDataChanged();
}
});
}
/**
* Resizes certificate table to preferred width.
*/
public void resizeColumnWidth(JTable table) {
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
final TableColumnModel columnModel = table.getColumnModel();
final int maxWidth = table.getParent().getWidth();
columnModel.getColumn(1).setPreferredWidth(80);
columnModel.getColumn(2).setPreferredWidth(60);
columnModel.getColumn(0).setPreferredWidth(maxWidth - 140);
}
});
}
/**
* This method transfers the currently selected certificate from the source KeyStore to the target KeyStore.
* @throws IOException
* @throws CertificateException
* @throws NoSuchAlgorithmException
* @throws KeyStoreException
*/
public void moveCertificate(File source, File target) throws KeyStoreException, NoSuchAlgorithmException, CertificateException, IOException {
int row = CertificatesManagerSettingsPanel.getCertTable().getSelectedRow();
String alias = allCertificates.get(row).getAlias();
moveCertificate(source, target, alias);
}
/**
* This method transfers a certificate from the source KeyStore to the target KeyStore.
*
* @param source
* File with the source KeyStore
* @param target
* File with the target KeyStore
* @param alias
* Alias of the certificate to be moved
* @throws IOException
* @throws KeyStoreException
* @throws NoSuchAlgorithmException
* @throws CertificateException
*/
public void moveCertificate(File source, File target, String alias)
throws IOException, KeyStoreException, NoSuchAlgorithmException, CertificateException {
if (!source.equals(TRUSTED) && !source.equals(BLACKLIST) && !source.equals(EXCEPTIONS)
&& !source.equals(DISPLAYED_CACERTS) && !source.equals(CACERTS_EXCEPTIONS) &&
!target.equals(TRUSTED) && !target.equals(EXCEPTIONS) && !target.equals(BLACKLIST)
&& !target.equals(DISPLAYED_CACERTS) && !target.equals(CACERTS_EXCEPTIONS)) {
throw new IllegalArgumentException();
}
KeyStore sourceStore = null;
if (source.equals(TRUSTED)) {
sourceStore = trustStore;
} else if (source.equals(EXCEPTIONS)) {
sourceStore = exceptionsStore;
} else if (source.equals(BLACKLIST)) {
sourceStore = blackListStore;
} else if (source.equals(DISPLAYED_CACERTS)) {
sourceStore = displayCaStore;
} else if (source.equals(CACERTS_EXCEPTIONS)) {
sourceStore = exceptionsCaStore;
}
KeyStore targetStore = null;
if (target.equals(TRUSTED)) {
targetStore = trustStore;
} else if (target.equals(EXCEPTIONS)) {
targetStore = exceptionsStore;
} else if (target.equals(BLACKLIST)) {
targetStore = blackListStore;
} else if (target.equals(DISPLAYED_CACERTS)) {
targetStore = displayCaStore;
} else if (target.equals(CACERTS_EXCEPTIONS)) {
targetStore = exceptionsCaStore;
}
X509Certificate cert = (X509Certificate) sourceStore.getCertificate(alias);
targetStore.setCertificateEntry(alias, cert);
sourceStore.deleteEntry(alias);
}
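/*
 * A minimal sketch, assuming "controller" is an initialized CertificateController and
 * "example-alias" (hypothetical) exists in the trusted KeyStore:
 *
 *   controller.moveCertificate(CertificateController.TRUSTED,
 *                              CertificateController.EXCEPTIONS,
 *                              "example-alias");
 *   controller.overWriteKeyStores(); // the move is only persisted once the KeyStores are saved
 */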
/**
* This method adds a certificate from a file (*.cer, *.crt, *.der, *.pem) to the TrustStore.
*
* @param file File with the certificate to be added
* @throws KeyStoreException
* @throws CertificateException
* @throws IOException
* @throws InvalidNameException
* @throws HeadlessException
*/
@Override
public void addEntryToKeyStore(File file) throws IOException, CertificateException,
KeyStoreException, HeadlessException, InvalidNameException {
if (file == null) {
throw new IllegalArgumentException();
}
try (InputStream inputStream = new FileInputStream(file)) {
CertificateFactory cf = CertificateFactory.getInstance("X509");
X509Certificate addedCert = (X509Certificate) cf.generateCertificate(inputStream);
CertificateModel certModel = new CertificateModel(addedCert);
if (!checkForSameCertificate(addedCert)) {
showCertificate(certModel, CertificateDialogReason.ADD_CERTIFICATE);
}
// value of addToKeyStore is changed by setter in CertificateDialog
if (addToKeystore) {
addToKeystore = false;
String alias = useCommonNameAsAlias(addedCert);
trustStore.setCertificateEntry(alias, addedCert);
trustedCertificates.add(new CertificateModel(addedCert));
refreshCertTable();
JOptionPane.showMessageDialog(null, Res.getString("dialog.certificate.has.been.added"));
}
}
}
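/*
 * A minimal sketch, assuming a hypothetical certificate file on disk:
 *
 *   controller.addEntryToKeyStore(new File("/path/to/server.crt"));
 *   // The certificate dialog is shown first; the entry is only written to the trusted
 *   // KeyStore if that dialog set addToKeystore to true.
 */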
/**
* Checks if there is a certificate entry in the TrustStore with the same alias.
*
* @param alias Alias of the certificate which is looked for in the model list
* @return True if a KeyStore contains the same alias.
* @throws HeadlessException
* @throws KeyStoreException
*/
protected boolean checkForSameAlias(String alias) throws HeadlessException, KeyStoreException {
for(CertificateModel model: allCertificates){
if(model.getAlias().equals(alias)){
return true;
}
}
return false;
}
/**
* Open dialog with certificate.
*/
public void showCertificate() {
CertificateDialog certDialog = new CertificateDialog(localPreferences,
allCertificates.get(CertificatesManagerSettingsPanel.getCertTable().getSelectedRow()), this, CertificateDialogReason.SHOW_CERTIFICATE);
}
public List<CertificateModel> getAllCertificates() {
return allCertificates;
}
public DefaultTableModel getTableModel() {
return tableModel;
}
public void setTableModel(DefaultTableModel tableModel) {
CertManager.tableModel = tableModel;
}
}
|
package org.exist.xquery;
import org.exist.xquery.util.Error;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.StringValue;
import org.exist.xquery.value.Type;
public class ConcatExpr extends PathExpr {
public ConcatExpr(XQueryContext context) {
super(context);
}
@Override
public void analyze(AnalyzeContextInfo contextInfo) throws XPathException {
if(getContext().getXQueryVersion() < 30){
throw new XPathException(this, ErrorCodes.EXXQDY0003,
"string concatenation operator is not available before XQuery 3.0");
}
super.analyze(contextInfo);
}
@Override
public void add(PathExpr pathExpr) {
Expression expr = new DynamicCardinalityCheck(context, Cardinality.ZERO_OR_ONE, pathExpr,
new Error(Error.FUNC_PARAM_CARDINALITY));
if (!Type.subTypeOf(expr.returnsType(), Type.ATOMIC))
expr = new Atomize(context, expr);
super.add(expr);
}
@Override
public Sequence eval(Sequence contextSequence, Item contextItem)
throws XPathException {
if (context.getProfiler().isEnabled()) {
context.getProfiler().start(this);
context.getProfiler().message(this, Profiler.DEPENDENCIES, "DEPENDENCIES", Dependency.getDependenciesName(this.getDependencies()));
if (contextSequence != null)
context.getProfiler().message(this, Profiler.START_SEQUENCES, "CONTEXT SEQUENCE", contextSequence);
if (contextItem != null)
context.getProfiler().message(this, Profiler.START_SEQUENCES, "CONTEXT ITEM", contextItem.toSequence());
}
StringBuilder concat = new StringBuilder();
for(Expression step : steps) {
concat.append(step.eval(contextSequence, contextItem).getStringValue());
}
StringValue result = new StringValue(concat.toString());
if (context.getProfiler().isEnabled())
context.getProfiler().end(this, "", result);
return result;
}
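/*
 * A brief illustration of the XQuery 3.0 string concatenation operator handled by this expression:
 *
 *   "foo" || "bar"     returns "foobar"
 *   1 || "x" || 2      returns "1x2"  (each operand is atomized and its string value appended)
 */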
@Override
public int returnsType() {
return Type.STRING;
}
@Override
public int getCardinality() {
return Cardinality.EXACTLY_ONE;
}
}
|
package org.exist.xquery.value;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.exist.Namespaces;
import org.exist.dom.QName;
import org.exist.util.hashtable.Int2ObjectHashMap;
import org.exist.util.hashtable.Object2IntHashMap;
import org.exist.xquery.XPathException;
import java.util.HashSet;
/**
* Defines all built-in types and their relations.
*
* @author Wolfgang Meier (wolfgang@exist-db.org)
*/
public class Type {
public static final int NODE = -1;
public final static int ELEMENT = 1;
public final static int ATTRIBUTE = 2;
public final static int TEXT = 3;
public final static int PROCESSING_INSTRUCTION = 4;
public final static int COMMENT = 5;
public final static int DOCUMENT = 6;
public final static int NAMESPACE = 500;
public final static int CDATA_SECTION = 501;
public final static int EMPTY = 10;
public final static int ITEM = 11;
public final static int ANY_TYPE = 12;
public final static int ANY_SIMPLE_TYPE = 13;
public final static int UNTYPED = 14;
public final static int ATOMIC = 20;
public final static int UNTYPED_ATOMIC = 21;
public final static int STRING = 22;
public final static int BOOLEAN = 23;
public final static int QNAME = 24;
public final static int ANY_URI = 25;
public final static int BASE64_BINARY = 26;
public final static int HEX_BINARY = 27;
public final static int NOTATION = 28;
public final static int NUMBER = 30;
public final static int INTEGER = 31;
public final static int DECIMAL = 32;
public final static int FLOAT = 33;
public final static int DOUBLE = 34;
public final static int NON_POSITIVE_INTEGER = 35;
public final static int NEGATIVE_INTEGER = 36;
public final static int LONG = 37;
public final static int INT = 38;
public final static int SHORT = 39;
public final static int BYTE = 40;
public final static int NON_NEGATIVE_INTEGER = 41;
public final static int UNSIGNED_LONG = 42;
public final static int UNSIGNED_INT = 43;
public final static int UNSIGNED_SHORT = 44;
public final static int UNSIGNED_BYTE = 45;
public final static int POSITIVE_INTEGER = 46;
public final static int DATE_TIME = 50;
public final static int DATE = 51;
public final static int TIME = 52;
public final static int DURATION = 53;
public final static int YEAR_MONTH_DURATION = 54;
public final static int DAY_TIME_DURATION = 55;
public final static int GYEAR = 56;
public final static int GMONTH = 57;
public final static int GDAY = 58;
public final static int GYEARMONTH = 59;
public final static int GMONTHDAY = 71;
public final static int TOKEN = 60;
public final static int NORMALIZED_STRING = 61;
public final static int LANGUAGE = 62;
public final static int NMTOKEN = 63;
public final static int NAME = 64;
public final static int NCNAME = 65;
public final static int ID = 66;
public final static int IDREF = 67;
public final static int ENTITY = 68;
public final static int JAVA_OBJECT = 100;
public final static int FUNCTION_REFERENCE = 101;
public final static int MAP = 102;
public final static int ARRAY = 103;
private final static Logger LOG = LogManager.getLogger(Type.class);
private final static int[] superTypes = new int[512];
private final static Int2ObjectHashMap<String[]> typeNames = new Int2ObjectHashMap<>(100);
//private final static Map<Integer, String[]> typeNames= new HashMap<Integer, String[]>(100);
private final static Object2IntHashMap<String> typeCodes = new Object2IntHashMap<>(100);
static {
defineSubType(ANY_TYPE, ANY_SIMPLE_TYPE);
defineSubType(ANY_TYPE, UNTYPED);
defineSubType(ANY_SIMPLE_TYPE, ATOMIC);
defineSubType(NODE, ELEMENT);
defineSubType(NODE, ATTRIBUTE);
defineSubType(NODE, TEXT);
defineSubType(NODE, PROCESSING_INSTRUCTION);
defineSubType(NODE, COMMENT);
defineSubType(NODE, DOCUMENT);
defineSubType(NODE, NAMESPACE);
defineSubType(NODE, CDATA_SECTION);
//THIS type system is broken - some of the below should be sub-types of ANY_SIMPLE_TYPE
//and some should not!
defineSubType(ITEM, ATOMIC);
defineSubType(ATOMIC, STRING);
defineSubType(ATOMIC, BOOLEAN);
defineSubType(ATOMIC, QNAME);
defineSubType(ATOMIC, ANY_URI);
defineSubType(ATOMIC, NUMBER);
defineSubType(ATOMIC, UNTYPED_ATOMIC);
defineSubType(ATOMIC, JAVA_OBJECT);
defineSubType(ATOMIC, DATE_TIME);
defineSubType(ATOMIC, DATE);
defineSubType(ATOMIC, TIME);
defineSubType(ATOMIC, DURATION);
defineSubType(ATOMIC, GYEAR);
defineSubType(ATOMIC, GMONTH);
defineSubType(ATOMIC, GDAY);
defineSubType(ATOMIC, GYEARMONTH);
defineSubType(ATOMIC, GMONTHDAY);
defineSubType(ATOMIC, BASE64_BINARY);
defineSubType(ATOMIC, HEX_BINARY);
defineSubType(ATOMIC, NOTATION);
defineSubType(DURATION, YEAR_MONTH_DURATION);
defineSubType(DURATION, DAY_TIME_DURATION);
defineSubType(NUMBER, DECIMAL);
defineSubType(NUMBER, FLOAT);
defineSubType(NUMBER, DOUBLE);
defineSubType(DECIMAL, INTEGER);
defineSubType(INTEGER, NON_POSITIVE_INTEGER);
defineSubType(NON_POSITIVE_INTEGER, NEGATIVE_INTEGER);
defineSubType(INTEGER, LONG);
defineSubType(LONG, INT);
defineSubType(INT, SHORT);
defineSubType(SHORT, BYTE);
defineSubType(INTEGER, NON_NEGATIVE_INTEGER);
defineSubType(NON_NEGATIVE_INTEGER, POSITIVE_INTEGER);
defineSubType(NON_NEGATIVE_INTEGER, UNSIGNED_LONG);
defineSubType(UNSIGNED_LONG, UNSIGNED_INT);
defineSubType(UNSIGNED_INT, UNSIGNED_SHORT);
defineSubType(UNSIGNED_SHORT, UNSIGNED_BYTE);
defineSubType(STRING, NORMALIZED_STRING);
defineSubType(NORMALIZED_STRING, TOKEN);
defineSubType(TOKEN, LANGUAGE);
defineSubType(TOKEN, NMTOKEN);
defineSubType(TOKEN, NAME);
defineSubType(NAME, NCNAME);
defineSubType(NCNAME, ID);
defineSubType(NCNAME, IDREF);
defineSubType(NCNAME, ENTITY);
defineSubType(ITEM, FUNCTION_REFERENCE);
defineSubType(FUNCTION_REFERENCE, MAP);
defineSubType(FUNCTION_REFERENCE, ARRAY);
}
static {
//TODO : use NODETYPES above ?
//TODO use parentheses after the nodes name ?
defineBuiltInType(NODE, "node()");
defineBuiltInType(ITEM, "item()");
defineBuiltInType(EMPTY, "empty-sequence()","empty()"); // keep empty() for backward compatibility
defineBuiltInType(ELEMENT, "element()");
defineBuiltInType(DOCUMENT, "document-node()");
defineBuiltInType(ATTRIBUTE, "attribute()");
defineBuiltInType(TEXT, "text()");
defineBuiltInType(PROCESSING_INSTRUCTION, "processing-instruction()");
defineBuiltInType(COMMENT, "comment()");
defineBuiltInType(NAMESPACE, "namespace()");
defineBuiltInType(CDATA_SECTION, "cdata-section()");
defineBuiltInType(JAVA_OBJECT, "object");
defineBuiltInType(FUNCTION_REFERENCE, "function(*)", "function");
defineBuiltInType(MAP, "map(*)", "map"); // keep map for backward compatibility
defineBuiltInType(ARRAY, "array(*)","array");
defineBuiltInType(NUMBER, "numeric");
defineBuiltInType(ANY_TYPE, "xs:anyType");
defineBuiltInType(ANY_SIMPLE_TYPE, "xs:anySimpleType");
defineBuiltInType(UNTYPED, "xs:untyped");
//Duplicate definition : new one first
defineBuiltInType(ATOMIC, "xs:anyAtomicType", "xdt:anyAtomicType");
//Duplicate definition : new one first
defineBuiltInType(UNTYPED_ATOMIC, "xs:untypedAtomic", "xdt:untypedAtomic");
defineBuiltInType(BOOLEAN, "xs:boolean");
defineBuiltInType(DECIMAL, "xs:decimal");
defineBuiltInType(FLOAT, "xs:float");
defineBuiltInType(DOUBLE, "xs:double");
defineBuiltInType(INTEGER, "xs:integer");
defineBuiltInType(NON_POSITIVE_INTEGER, "xs:nonPositiveInteger");
defineBuiltInType(NEGATIVE_INTEGER, "xs:negativeInteger");
defineBuiltInType(LONG, "xs:long");
defineBuiltInType(INT, "xs:int");
defineBuiltInType(SHORT, "xs:short");
defineBuiltInType(BYTE, "xs:byte");
defineBuiltInType(NON_NEGATIVE_INTEGER, "xs:nonNegativeInteger");
defineBuiltInType(UNSIGNED_LONG, "xs:unsignedLong");
defineBuiltInType(UNSIGNED_INT, "xs:unsignedInt");
defineBuiltInType(UNSIGNED_SHORT, "xs:unsignedShort");
defineBuiltInType(UNSIGNED_BYTE, "xs:unsignedByte");
defineBuiltInType(POSITIVE_INTEGER, "xs:positiveInteger");
defineBuiltInType(STRING, "xs:string");
defineBuiltInType(QNAME, "xs:QName");
defineBuiltInType(ANY_URI, "xs:anyURI");
defineBuiltInType(BASE64_BINARY, "xs:base64Binary");
defineBuiltInType(HEX_BINARY, "xs:hexBinary");
defineBuiltInType(NOTATION, "xs:NOTATION");
defineBuiltInType(DATE_TIME, "xs:dateTime");
defineBuiltInType(DATE, "xs:date");
defineBuiltInType(TIME, "xs:time");
defineBuiltInType(DURATION, "xs:duration");
defineBuiltInType(GYEAR, "xs:gYear");
defineBuiltInType(GMONTH, "xs:gMonth");
defineBuiltInType(GDAY, "xs:gDay");
defineBuiltInType(GYEARMONTH, "xs:gYearMonth");
defineBuiltInType(GMONTHDAY, "xs:gMonthDay");
//Duplicate definition : new one first
defineBuiltInType(YEAR_MONTH_DURATION, "xs:yearMonthDuration", "xdt:yearMonthDuration");
//Duplicate definition : new one first
defineBuiltInType(DAY_TIME_DURATION, "xs:dayTimeDuration", "xdt:dayTimeDuration");
defineBuiltInType(NORMALIZED_STRING, "xs:normalizedString");
defineBuiltInType(TOKEN, "xs:token");
defineBuiltInType(LANGUAGE, "xs:language");
defineBuiltInType(NMTOKEN, "xs:NMTOKEN");
defineBuiltInType(NAME, "xs:Name");
defineBuiltInType(NCNAME, "xs:NCName");
defineBuiltInType(ID, "xs:ID");
defineBuiltInType(IDREF, "xs:IDREF");
defineBuiltInType(ENTITY, "xs:ENTITY");
}
/**
* Register a built-in type under one or more names.
*
* @param type the type constant to register
* @param name the first name is the default name, any other names are aliases
*/
public static void defineBuiltInType(int type, String... name) {
typeNames.put(type, name);
for (final String n : name) {
typeCodes.put(n, type);
}
}
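/*
 * Illustrative lookups, based on the definitions above (the legacy xdt:* aliases resolve to the same codes):
 *
 *   Type.getType("xs:dayTimeDuration")   == Type.DAY_TIME_DURATION
 *   Type.getType("xdt:dayTimeDuration")  == Type.DAY_TIME_DURATION
 *   Type.getTypeName(Type.DAY_TIME_DURATION)  // "xs:dayTimeDuration" (the first, default name)
 */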
/**
* Get the internal default name for the built-in type.
*
* @param type
*/
public static String getTypeName(int type) {
return typeNames.get(type)[0];
}
/**
* Get the internal aliases for the built-in type.
*
* @param type
*/
public static String[] getTypeAliases(int type) {
final String names[] = typeNames.get(type);
if (names != null && names.length > 1) {
final String aliases[] = new String[names.length - 1];
System.arraycopy(names, 1, aliases, 0, names.length - 1);
return aliases;
}
return null;
}
/**
* Get the type code for a type identified by its internal name.
*
* @param name
* @throws XPathException
*/
public static int getType(String name) throws XPathException {
//if (name.equals("node"))
// return NODE;
final int code = typeCodes.get(name);
if (code == Object2IntHashMap.UNKNOWN_KEY) {
throw new XPathException("Type: " + name + " is not defined");
}
return code;
}
/**
* Get the type code for a type identified by its QName.
*
* @param qname
* @throws XPathException
*/
public static int getType(QName qname) throws XPathException {
final String uri = qname.getNamespaceURI();
switch (uri) {
case Namespaces.SCHEMA_NS:
return getType("xs:" + qname.getLocalPart());
case Namespaces.XPATH_DATATYPES_NS:
return getType("xdt:" + qname.getLocalPart());
default:
return getType(qname.getLocalPart());
}
}
/**
* Define supertype/subtype relation.
*
* @param supertype
* @param subtype
*/
public static void defineSubType(int supertype, int subtype) {
superTypes[subtype] = supertype;
}
public static boolean subTypeOf(int subtype, int supertype) {
if (subtype == supertype) {
return true;
}
//Note that it will return true even if subtype == EMPTY
if (supertype == ITEM || supertype == ANY_TYPE)
//maybe return subtype != EMPTY ?
{
return true;
}
//Note that EMPTY is *not* a sub-type of anything else than itself
//EmptySequence has to take care of this when it checks its type
if (subtype == ITEM || subtype == EMPTY || subtype == ANY_TYPE || subtype == NODE) {
return false;
}
final int parentType = superTypes[subtype];
if (parentType == 0) {
throw new IllegalArgumentException(
"type " + subtype + " is not a valid type");
}
return subTypeOf(parentType, supertype);
}
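/*
 * Illustrative results, following the hierarchy defined in the static initializer:
 *
 *   Type.subTypeOf(Type.INT, Type.DECIMAL)    // true:  INT -> LONG -> INTEGER -> DECIMAL
 *   Type.subTypeOf(Type.STRING, Type.NUMBER)  // false: STRING's chain reaches ATOMIC/ITEM without passing NUMBER
 *   Type.subTypeOf(Type.EMPTY, Type.ITEM)     // true, see the note above about ITEM/ANY_TYPE
 */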
/**
* Get the type code of the supertype of the specified subtype.
*
* @param subtype
*/
public static int getSuperType(final int subtype) {
if (subtype == ITEM || subtype == NODE) {
return ITEM;
}
final int supertype = superTypes[subtype];
if (supertype == 0) {
LOG.warn("eXist does not define a super-type for the sub-type {}", getTypeName(subtype), new Throwable());
return ITEM;
}
return supertype;
}
/**
* Find a common supertype for two given type codes.
* <p>
* Type.ITEM is returned if no other common supertype
* is found.
*
* @param type1
* @param type2
*/
public static int getCommonSuperType(int type1, int type2) {
//Super shortcut
if (type1 == type2) {
return type1;
}
// if one of the types is empty(), return the other type: optimizer is free to choose
// an optimization based on the more specific type.
if (type1 == Type.EMPTY) {
return type2;
} else if (type2 == Type.EMPTY) {
return type1;
}
//TODO : optimize by swapping the arguments based on their numeric values ?
//Processing lower value first *should* reduce the size of the Set
//Collect type1's super-types
final HashSet<Integer> t1 = new HashSet<>();
//Don't introduce a shortcut (starting at getSuperType(type1) here
//type2 might be a super-type of type1
int t;
for (t = type1; t != ITEM; t = getSuperType(t)) {
//Shortcut
if (t == type2) {
return t;
}
t1.add(t);
}
//Starting from type2's super type : the shortcut should have done its job
for (t = getSuperType(type2); t != ITEM; t = getSuperType(t)) {
if (t1.contains(t)) {
return t;
}
}
return ITEM;
}
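/*
 * Illustrative results of getCommonSuperType:
 *
 *   Type.getCommonSuperType(Type.INT, Type.DOUBLE)    // NUMBER (INT -> LONG -> INTEGER -> DECIMAL -> NUMBER; DOUBLE -> NUMBER)
 *   Type.getCommonSuperType(Type.STRING, Type.EMPTY)  // STRING (empty() defers to the more specific type)
 *   Type.getCommonSuperType(Type.DATE, Type.STRING)   // ATOMIC
 */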
}
|
package io.anyway.galaxy.proxy;
import javassist.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
public class ProxyFactory {
private static final ConcurrentMap<String, Class<?>> CLASSES = new ConcurrentHashMap<String, Class<?>>();
private static final ConcurrentMap<Class<?>, Object> INSTANCES = new ConcurrentHashMap<Class<?>, Object>();
/**
* Returns a proxy instance that implements the given interface and delegates each call
* to the same-named method of the target object's class.
*
* @param object the target object whose class the generated stub delegates to
* @param clz the interface the generated proxy class implements
* @param types the parameter types used to cast the elements of the args array before delegating
* @param <T> the expected proxy type
* @return a cached proxy instance (one per generated class)
*/
public static <T> T getProxy(Object object, Class<?> clz, Class<?>[] types) throws Throwable {
return getInstance(getProxyClass(object, clz, types));
}
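/*
 * A hedged usage sketch; FooCallback and FooService are hypothetical names. The generated stub
 * implements every method of the given interface with the signature (Object target, Object[] args)
 * and delegates to the same-named method on the target's class, casting args to the supplied types:
 *
 *   // interface FooCallback { void handle(Object target, Object[] args); }
 *   // class FooService      { public void handle(String message) { ... } }
 *   FooCallback stub = ProxyFactory.getProxy(new FooService(), FooCallback.class,
 *                                            new Class<?>[]{ String.class });
 *   stub.handle(new FooService(), new Object[]{ "hello" }); // ends up calling FooService.handle("hello")
 */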
private static <T> T getInstance(Class<?> clazz) throws Throwable {
if (clazz == null) {
return null;
}
T instance = (T) INSTANCES.get(clazz);
if (instance == null) {
INSTANCES.putIfAbsent(clazz, (T) clazz.newInstance());
instance = (T) INSTANCES.get(clazz);
}
return instance;
}
private static Class<?> getProxyClass(Object object, Class<?> clz, Class<?>[] types) throws Throwable{
if (object == null) {
return null;
}
Class<?> cls = CLASSES.get(object.getClass().getSimpleName());
if (cls != null) {
return cls;
}
ClassPool pool = new ClassPool(true);
pool.appendClassPath(new LoaderClassPath(getClassloader(object)));
CtClass cc = pool.makeClass(object.getClass().getSimpleName() + "ProxyStub");
cc.addInterface(pool.get(clz.getName()));
// Method append
CtMethod mthd;
StringBuilder sb = new StringBuilder();
String methodName;
for (int i = 0; i < clz.getMethods().length; i++) {
methodName = clz.getMethods()[i].getName();
sb.append("public void ").append(methodName).append("(Object target, Object[] args){ ");
sb.append("((").append(object.getClass().getName()).append(")target).").append(methodName).append("(");
for (int j = 0; j < types.length; j++) {
if (j == 0) {
sb.append("(").append(types[j].getName()).append(")").append("args[").append(j).append("]");
} else {
sb.append(", (").append(types[j].getName()).append(")").append("args[").append(j).append("]");
}
}
sb.append(");}");
System.out.println(sb.toString());
mthd = CtNewMethod.make(sb.toString(),cc);
cc.addMethod(mthd);
sb.setLength(0);
}
//Class
cls = cc.toClass();
// TODO method Try
CLASSES.putIfAbsent(object.getClass().getSimpleName(), cls);
return cls;
}
private static ClassLoader getClassloader(Object object) {
ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
if (classLoader == null) classLoader = object.getClass().getClassLoader();
return classLoader;
}
}
|
/* Open Source Software - may be modified and shared by FRC teams. The code */
/* must be accompanied by the FIRST BSD license file in the root directory of */
/* the project. */
package org.foximus.prime;
import com.sun.squawk.util.MathUtils;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.Jaguar;
import edu.wpi.first.wpilibj.Relay;
import edu.wpi.first.wpilibj.Joystick;
import edu.wpi.first.wpilibj.RobotDrive;
import edu.wpi.first.wpilibj.DriverStationLCD;
import edu.wpi.first.wpilibj.GenericHID.Hand;
import edu.wpi.first.wpilibj.Victor;
import edu.wpi.first.wpilibj.AnalogChannel;
/**
* The VM is configured to automatically run this class, and to call the
* functions corresponding to each mode, as described in the IterativeRobot
* documentation. If you change the name of this class or the package after
* creating this project, you must also update the manifest file in the resource
* directory.
*/
public class mainRobot extends IterativeRobot {
double THETA = Math.toRadians(48); //48 degrees as radians. Current shooter angle.
double G = 9.81;
//http://en.wikipedia.org/wiki/Trajectory_of_a_projectile#Notation
double LOWBASKETY = .7112;
double MIDBASKETY = 1.549;
double TOPBASKETY = 2.489;
double YOFFSET = -1.003/*shooter height*/ + .1524/*center backthing*/;
double XOFFSET = 0;
AnalogChannel ultrasonic = new AnalogChannel(1);
AnalogChannel potentiameter = new AnalogChannel(2);
Relay botPickup = new Relay(1);
Relay topPickup = new Relay(2);
Victor shooterT = new Victor(8);
Victor shooterB = new Victor(9);
Victor arm = new Victor(5);
Victor shootRot = new Victor(6);
Joystick joy1 = new Joystick(1);
Joystick joy2 = new Joystick(2);
RobotDrive drive = new RobotDrive(3,4);
public double getXDistance(){ //currently meters. METRIC!
double d = ultrasonic.getVoltage() / 0.009766;
d *= 0.0254;
return d;
}
public double v(double x, double y, double theta, double g){
double v;
v= -4 * ( 2*g*y - 2*Math.tan(theta)) * (g*g*x*x+Math.tan(theta)*Math.tan(theta));
if(v < 0)
return -1;
v = Math.sqrt(v) / (2 *(2*y*g - 2*Math.tan(theta)));
if(v<0)
v = -v;
return v;
}
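/*
 * For reference (the projectile-trajectory article linked above), the textbook launch speed needed
 * to hit a point (x, y) from the origin at a fixed angle theta is
 *
 *   v = sqrt( g * x^2 / ( 2 * cos^2(theta) * ( x * tan(theta) - y ) ) )
 *
 * The expression implemented above is written differently; the textbook form is included here only
 * as a sanity check, not as a claim that the two are equivalent.
 */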
public double speedToPower(double speed){ // speed in meters per second. Based on exponential regression and experimental results.
return .160119* MathUtils.pow(1.2836555,speed);
}
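/*
 * Quick sanity check of the regression constants above (approximate):
 * at 6 m/s the commanded power is about 0.160119 * 1.2836555^6, i.e. roughly 0.72.
 */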
public double calcShooterPower(double basket){
return speedToPower(v(getXDistance()+XOFFSET, basket + YOFFSET,THETA, G));
}
/**
* This function is run when the robot is first started up and should be
* used for any initialization code.
*/
public void robotInit() {
getWatchdog().setEnabled(false);
botPickup.setDirection(Relay.Direction.kReverse);
topPickup.setDirection(Relay.Direction.kForward);
drive.arcadeDrive(joy1);
}
/**
* This function is called periodically during autonomous
*/
public void autonomousPeriodic() {
}
/**
* This function is called periodically during operator control
*/
public void teleopPeriodic() {
//double magnitude, direction, rotation;
//Create "deadzone" variables. Adjust threshold value to increase/decrease deadzone
//double X2 = 0, Y1 = 0, X1 = 0, threshold = 15.0;
while (isOperatorControl() && isEnabled()) // loop until the mode changes
{
double selectedBasket = LOWBASKETY;
if(-joy2.getY() > 0.0){
shooterT.set(-joy2.getY());
shooterB.set(-joy2.getY());
} else if (joy2.getRawButton(8)) {
shooterT.set(calcShooterPower(LOWBASKETY));
shooterB.set(calcShooterPower(LOWBASKETY));
selectedBasket = LOWBASKETY;
} else if (joy2.getRawButton(9)) {
shooterT.set(calcShooterPower(MIDBASKETY));
shooterB.set(calcShooterPower(MIDBASKETY));
selectedBasket = MIDBASKETY;
} else if (joy2.getRawButton(10)) {
shooterT.set(calcShooterPower(TOPBASKETY));
shooterB.set(calcShooterPower(TOPBASKETY));
selectedBasket = TOPBASKETY;
} else {
shooterT.set(0.0);
shooterB.set(0.0);
}
if(joy2.getRawButton(3))
botPickup.set(Relay.Value.kOn);
else
botPickup.set(Relay.Value.kOff);
if(joy2.getRawButton(6))
arm.set(0.5);
else if(joy2.getRawButton(7))
arm.set(-0.5);
else
arm.set(0.0);
shootRot.set(joy2.getX());
if(joy2.getTrigger())
topPickup.set(Relay.Value.kOn);
//else if(joy2.getRawButton(3))
// botPickup.set(1.0);
else
topPickup.set(Relay.Value.kOff);
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser2, 1, "Pent:"+potentiameter.getVoltage());
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser4, 1, "Sonic:"+getXDistance());
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser4, 1, "CalcedPower:"+ calcShooterPower(selectedBasket));
DriverStationLCD.getInstance().updateLCD();
//drive.tankDrive(joy1, joy2);
/*
if(joy1.getTrigger(Hand.kLeft)) {
shooterT.set(joy1.getThrottle());
shooterB.set(joy1.getThrottle());
} else { shooterT.set(0); shooterB.set(0); }
if(joy1.getRawButton(6)) {
secondLift.set(joy2.getThrottle());
} else {secondLift.set(0);}
if(joy1.getRawButton(7)) {
firstLift.set(joy2.getThrottle());
} else { firstLift.set(0); }
if(joy1.getRawButton(8)) {
shooterRot.set(.1);
} else { shooterRot.set(0); }
if(joy1.getRawButton(9)) {
shooterRot.set(-.1);
} else { shooterRot.set(0); }
*/
/*magnitude = joy1.getMagnitude();
direction = joy1.getDirectionDegrees();
rotation = joy2.getX();*/
//drive.mecanumDrive_Cartesian(magnitude, direction, rotation, 0);
//drive.mecanumDrive_Cartesian(joy1.getX(), joy1.getY(), joy1.getZ(), 0.0);
//drive.mecanumDrive_Cartesian(joy1.getX(), joy1.getY(), joy2.getX(), 180.0);
//Timer.delay(0.005);
// ... or for two 2-axis joysticks do this (Halo):
/*double forward = -joy1.getY(); // push joystick1 forward to go forward
double right = joy1.getX(); // push joystick1 to the right to strafe right
double clockwise = joy2.getX(); // push joystick2 to the right to rotate clockwise
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser2, 1, "Joy1 X:"+right+" Y:"+forward);
DriverStationLCD.getInstance().println(DriverStationLCD.Line.kUser4, 1, "Joy2 X:"+clockwise);
DriverStationLCD.getInstance().updateLCD();
// note: the above can only do 2 degrees of freedom at a time.
// Button1 selects Halo or Arcade.
// ... or any other driver interface scheme you like.
// K is a tuning constant for the rotate axis sensitivity.
// Start with K=0, and increase it very slowly (do not exceed K=1)
// to find the right value after youve got fwd/rev and strafe working:
double K = 0.1;
clockwise = K*clockwise;
// OPTIONAL. If desired, use the gyro angle for field-centric control.
// "theta" is the gyro angle, measured CCW from the zero reference:
double temp = forward*cos(theta) - right*sin(theta);
right = forward*sin(theta) + right*cos(theta);
forward = temp;
// Now apply the inverse kinematic tranformation
// to convert your vehicle motion command
// to 4 wheel speed commands:
double front_left = forward + clockwise + right;
double front_right = forward - clockwise - right;
double rear_left = forward + clockwise - right;
double rear_right = forward - clockwise + right;
// Finally, normalize the wheel speed commands
// so that no wheel speed command exceeds magnitude of 1:
double max = Math.abs(front_left);
if (Math.abs(front_right)>max) max = Math.abs(front_right);
if (Math.abs(rear_left)>max) max = Math.abs(rear_left);
if (Math.abs(rear_right)>max) max = Math.abs(rear_right);
if (max>1)
{front_left/=max; front_right/=max; rear_left/=max; rear_right/=max;}
// You're done. Send these four wheel commands to their respective wheels
FL.set(front_left);
BL.set(rear_left);
FR.set(front_right);
BR.set(rear_right);
*/
/*
magnitude = joy1.getY();
direction = joy1.getX();
rotation = joy2.getX();
drive.mecanumDrive_Polar(magnitude, direction, rotation);
Timer.delay(0.005);
*/
//Create "deadzone" for Y1
/*Y1 = joy1.getY();
X1 = joy1.getX();
X2 = joy2.getX();*/
/*
if(Math.abs(joy1.getY()) > threshold)
Y1 = joy1.getY();
else
Y1 = 0;
//Create "deadzone" for X1
if(Math.abs(joy1.getX()) > threshold)
X1 = joy1.getX();
else
X1 = 0;
//Create "deadzone" for X2
if(Math.abs(joy2.getX()) > threshold)
X2 = joy2.getX();
else
X2 = 0;
double max = 0.0;
if(Math.abs(Y1 - X2 - X1) > max)
max = Math.abs(Y1 - X2 - X1);
if(Math.abs(Y1 - X2 + X1) > max)
max = Math.abs(Y1 - X2 + X1);
if(Math.abs(Y1 + X2 + X1) > max)
max = Math.abs(Y1 + X2 + X1);
if(Math.abs(Y1 + X2 - X1) > max)
max = Math.abs(Y1 + X2 - X1);
//Remote Control Commands
FR.set((Y1 - X2 - X1)/max);
BR.set((Y1 - X2 + X1)/max);
FL.set((Y1 + X2 + X1)/max);
BL.set((Y1 + X2 - X1)/max);
*/
}
}
}
|
package org.hisp.dhis.android.core.enrollment;
import org.hisp.dhis.android.core.common.OrphanCleaner;
import org.hisp.dhis.android.core.common.State;
import org.hisp.dhis.android.core.data.database.DatabaseAdapter;
import org.hisp.dhis.android.core.event.Event;
import org.hisp.dhis.android.core.event.EventHandler;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(JUnit4.class)
public class EnrollmentHandlerShould {
@Mock
private EnrollmentStore enrollmentStore;
@Mock
private EventHandler eventHandler;
@Mock
private Enrollment enrollment;
@Mock
private Event event;
@Mock
private DatabaseAdapter databaseAdapter;
@Mock
private OrphanCleaner<Enrollment, Event> eventCleaner;
// object to test
private EnrollmentHandler enrollmentHandler;
@Before
public void setUp() throws Exception {
MockitoAnnotations.initMocks(this);
when(enrollment.uid()).thenReturn("test_enrollment_uid");
when(enrollment.events()).thenReturn(Collections.singletonList(event));
enrollmentHandler = new EnrollmentHandler(databaseAdapter, enrollmentStore, eventHandler, eventCleaner);
}
@Test
public void do_nothing_when_passing_null_argument() throws Exception {
enrollmentHandler.handle(null);
// verify that store or event handler is never called
verify(enrollmentStore, never()).delete(anyString());
verify(enrollmentStore, never()).update(
anyString(), any(Date.class), any(Date.class), anyString(), anyString(), anyString(),
anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class), anyString()
);
verify(enrollmentStore, never()).insert(
anyString(), any(Date.class), any(Date.class), anyString(), anyString(), anyString(),
anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class)
);
verify(eventHandler, never()).handle(any(Event.class));
verify(eventCleaner, never()).deleteOrphan(any(Enrollment.class), any(ArrayList.class));
}
@Test
public void invoke_only_delete_when_a_enrollment_is_set_as_deleted() throws Exception {
when(enrollment.deleted()).thenReturn(Boolean.TRUE);
enrollmentHandler.handle(Collections.singletonList(enrollment));
// verify that enrollment store is only invoked with delete
verify(enrollmentStore, times(1)).delete(anyString());
verify(enrollmentStore, never()).update(
anyString(), any(Date.class), any(Date.class), anyString(), anyString(), anyString(),
anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class), anyString()
);
verify(enrollmentStore, never()).insert(
anyString(), any(Date.class), any(Date.class), anyString(), anyString(), anyString(),
anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class)
);
// event handler should not be invoked
verify(eventHandler, never()).handle(any(Event.class));
verify(eventCleaner, times(1)).deleteOrphan(any(Enrollment.class), any(ArrayList.class));
}
@Test
public void invoke_only_update_when_handle_enrollment_inserted() throws Exception {
when(enrollmentStore.update(anyString(), any(Date.class), any(Date.class), anyString(), anyString(),
anyString(), anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class), anyString())
).thenReturn(1);
enrollmentHandler.handle(Collections.singletonList(enrollment));
// verify that enrollment store is only invoked with update
verify(enrollmentStore, times(1)).update(
anyString(), any(Date.class), any(Date.class), anyString(), anyString(), anyString(),
anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class), anyString()
);
verify(enrollmentStore, never()).delete(anyString());
verify(enrollmentStore, never()).insert(
anyString(), any(Date.class), any(Date.class), anyString(), anyString(), anyString(),
anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class)
);
// event handler should be invoked once
verify(eventHandler, times(1)).handleMany(any(ArrayList.class));
verify(eventCleaner, times(1)).deleteOrphan(any(Enrollment.class), any(ArrayList.class));
}
@Test
public void invoke_update_and_insert_when_handle_enrollment_not_inserted() throws Exception {
when(enrollmentStore.update(anyString(), any(Date.class), any(Date.class), anyString(), anyString(),
anyString(), anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class), anyString())
).thenReturn(0);
enrollmentHandler.handle(Collections.singletonList(enrollment));
// verify that enrollment store is only invoked with insert
verify(enrollmentStore, times(1)).insert(
anyString(), any(Date.class), any(Date.class), anyString(), anyString(), anyString(),
anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class)
);
// verify that update is also invoked since we're trying to update before we insert
verify(enrollmentStore, times(1)).update(
anyString(), any(Date.class), any(Date.class), anyString(), anyString(), anyString(),
anyString(), any(Date.class), any(Date.class), anyBoolean(),
any(EnrollmentStatus.class), anyString(), anyString(),
anyString(), any(State.class), anyString()
);
// verify that delete is never invoked
verify(enrollmentStore, never()).delete(anyString());
// event handler should be invoked once
verify(eventHandler, times(1)).handleMany(any(ArrayList.class));
verify(eventCleaner, times(1)).deleteOrphan(any(Enrollment.class), any(ArrayList.class));
}
}
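// Note (added): taken together, the tests above assume an upsert-style flow inside
// EnrollmentHandler.handle(); a minimal, hypothetical sketch (not the actual implementation):
//
//   for (Enrollment enrollment : enrollments) {
//       if (Boolean.TRUE.equals(enrollment.deleted())) {
//           enrollmentStore.delete(enrollment.uid());
//       } else {
//           int updatedRows = enrollmentStore.update(/* all enrollment columns */);
//           if (updatedRows <= 0) {
//               enrollmentStore.insert(/* all enrollment columns */);
//           }
//           eventHandler.handleMany(new ArrayList<>(enrollment.events()));
//       }
//       eventCleaner.deleteOrphan(enrollment, enrollment.events());
//   }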
|
package org.jgroups.debug;
import org.jgroups.Address;
import org.jgroups.Event;
import org.jgroups.Message;
import org.jgroups.View;
import org.jgroups.stack.Protocol;
import org.jgroups.util.Queue;
import org.jgroups.util.QueueClosedException;
import java.util.HashMap;
import java.util.Iterator;
/**
* Tests one or more protocols independently. Look at org.jgroups.tests.FCTest for an example of how to use it.
* @author Bela Ban
* @version $Id: Simulator.java,v 1.2 2005/07/17 23:02:37 chrislott Exp $
*/
public class Simulator {
private Protocol[] protStack=null;
private ProtocolAdapter ad=new ProtocolAdapter();
private Receiver r=null;
private Protocol top=null, bottom=null;
private Queue send_queue=new Queue();
private Thread send_thread;
private Queue recv_queue=new Queue();
private Thread recv_thread;
/** HashMap from Address to Simulator. */
public static HashMap addrTable=new HashMap();
private Address local_addr=null;
private View view;
public interface Receiver {
void receive(Event evt);
}
public void setProtocolStack(Protocol[] stack) {
this.protStack=stack;
this.protStack[0].setUpProtocol(ad);
this.protStack[this.protStack.length-1].setDownProtocol(ad);
top=protStack[0];
bottom=this.protStack[this.protStack.length-1];
if(protStack.length > 1) {
for(int i=0; i < protStack.length; i++) {
Protocol p1=protStack[i];
Protocol p2=i+1 >= protStack.length? null : protStack[i+1];
if(p2 != null) {
p1.setDownProtocol(p2);
p2.setUpProtocol(p1);
}
}
}
}
public void setLocalAddress(Address addr) {
this.local_addr=addr;
}
public void setView(View v) {
this.view=v;
}
public void setReceiver(Receiver r) {
this.r=r;
}
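/*
 * Usage sketch (added): the class javadoc points to org.jgroups.tests.FCTest for a real
 * example. A minimal, hypothetical wiring of two simulators looks roughly like this
 * (addresses a1/a2 and the protocols under test are placeholders):
 *
 *   Simulator s1=new Simulator(), s2=new Simulator();
 *   s1.setLocalAddress(a1); s2.setLocalAddress(a2);
 *   Simulator.addrTable.put(a1, s1); Simulator.addrTable.put(a2, s2);
 *   s1.setProtocolStack(new Protocol[]{protocolUnderTest1});
 *   s2.setProtocolStack(new Protocol[]{protocolUnderTest2});
 *   s2.setReceiver(new Receiver() {
 *       public void receive(Event evt) { ... } // inspect delivered events
 *   });
 *   s1.start(); s2.start();
 *   s1.send(new Event(Event.MSG, new Message(a2, null, "hello")));
 */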
public void send(Event evt) {
top.down(evt);
}
public void receive(Event evt) {
try {
recv_queue.add(evt);
}
catch(QueueClosedException e) {
}
}
public void start() throws Exception {
if(local_addr == null)
throw new Exception("local_addr has to be non-null");
if(protStack == null)
throw new Exception("protocol stack is null");
bottom.up(new Event(Event.SET_LOCAL_ADDRESS, local_addr));
if(view != null)
top.down(new Event(Event.VIEW_CHANGE, view));
send_thread=new Thread() {
public void run() {
Event evt;
while(send_thread != null) {
try {
evt=(Event)send_queue.remove();
if(evt.getType() == Event.MSG) {
Message msg=(Message)evt.getArg();
Address dst=msg.getDest();
if(msg.getSrc() == null)
((Message)evt.getArg()).setSrc(local_addr);
Simulator s;
if(dst == null) {
for(Iterator it=addrTable.values().iterator(); it.hasNext();) {
s=(Simulator)it.next();
s.receive(evt);
}
}
else {
s=(Simulator)addrTable.get(dst);
if(s != null)
s.receive(evt);
}
}
}
catch(QueueClosedException e) {
send_thread=null;
break;
}
}
}
};
send_thread.start();
recv_thread=new Thread() {
public void run() {
Event evt;
while(recv_thread != null) {
try {
evt=(Event)recv_queue.remove();
bottom.up(evt);
}
catch(QueueClosedException e) {
recv_thread=null;
break;
}
}
}
};
recv_thread.start();
}
public void stop() {
recv_thread=null;
recv_queue.close(false);
send_thread=null;
send_queue.close(false);
}
class ProtocolAdapter extends Protocol {
public String getName() {
return "ProtocolAdapter";
}
public void up(Event evt) {
if(r != null)
r.receive(evt);
}
/** send to unicast or multicast destination */
public void down(Event evt) {
try {
send_queue.add(evt);
}
catch(QueueClosedException e) {
}
}
}
}
|
package org.opentosca.toscana.core.transformation.logging;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.List;
import org.opentosca.toscana.core.BaseJUnitTest;
import ch.qos.logback.classic.Level;
import org.junit.Before;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
public class LogImplTest extends BaseJUnitTest {
private Logger logger = LoggerFactory.getLogger(LogImplTest.class);
private Log log;
private File logfile;
@Before
public void setUp() throws Exception {
log = new LogImpl(new File(tmpdir, "log"));
logger.info("Creating dummy log entries");
for (int index = 0; index < 100; index++) {
LogEntry logEntry = new LogEntry(index, String.format("Log-Message-%d", index), Level.DEBUG);
log.addLogEntry(logEntry);
}
logfile = new File(tmpdir, "log");
}
@Test
public void getAllLogEntries() throws Exception {
logger.info("Trying to retrieve complete log");
List<LogEntry> logs = log.getLogEntries(0);
logger.info("Checking length");
assertTrue(logs.size() == 100);
logger.info("Checking data");
for (int i = 0; i < logs.size(); i++) {
LogEntry e = logs.get(i);
assertEquals((String.format("Log-Message-%d", i)), e.getMessage());
}
logger.info("Done");
}
@Test
public void getPartialLogEntries() throws Exception {
logger.info("Trying to log from index 50");
List<LogEntry> logs = log.getLogEntries(50);
logger.info("Checking length");
assertTrue(logs.size() == 50);
logger.info("Checking data");
for (int i = 0; i < logs.size(); i++) {
LogEntry e = logs.get(i);
// the returned list starts at log index 50, so the expected message is offset by 50
assertEquals(String.format("Log-Message-%d", i + 50), e.getMessage());
}
logger.info("Done");
}
@Test
public void getLogsFromOuterBound() throws Exception {
logger.info("Trying to get logs from index 100");
assertSame(0, log.getLogEntries(101).size());
logger.info("Done");
}
@Test
public void getFirstTenLogEntries() throws Exception {
logger.info("Trying to log from index 0 to 10");
List<LogEntry> logs = log.getLogEntries(0, 9);
logger.info("Checking length");
assertSame(10, logs.size());
logger.info("Checking data");
for (int i = 0; i < logs.size(); i++) {
LogEntry e = logs.get(i);
assertEquals(String.format("Log-Message-%d", i), e.getMessage());
}
logger.info("Done");
}
@Test(expected = IllegalArgumentException.class)
public void getLogEntriesWithInvalidBounds() throws Exception {
logger.info("Trying to log from index 0 to 10");
log.getLogEntries(100, 10);
}
@Test
public void readLogEntriesFromDisk() throws IOException {
log = new LogImpl(logfile);
Logger testLogger = log.getLogger("my-test-context");
testLogger.info("produce a first valid log line");
Log testLog = new LogImpl(logfile);
List<LogEntry> entries = testLog.getLogEntries(0);
assertEquals(1, entries.size());
testLogger.info("produce a second valid log line");
testLog = new LogImpl(logfile);
entries = testLog.getLogEntries(0);
assertEquals(2, entries.size());
}
@Test
public void readLogEntriesFromDiskSetLevelCorrectly() {
log = new LogImpl(logfile);
Logger testLogger = log.getLogger("my-test-context");
String[] messages = {"info message", "warn message", "error message"};
testLogger.info(messages[0]);
testLogger.warn(messages[1]);
testLogger.error(messages[2]);
Log readFromDiskLog = new LogImpl(logfile);
List<LogEntry> logEntries = readFromDiskLog.getLogEntries(0);
assertEquals(3, logEntries.size());
assertTrue(logEntries.get(0).getMessage().contains(messages[0]));
assertEquals(Level.INFO.toString(), logEntries.get(0).getLevel());
assertTrue(logEntries.get(1).getMessage().contains(messages[1]));
assertEquals(Level.WARN.toString(), logEntries.get(1).getLevel());
assertTrue(logEntries.get(2).getMessage().contains(messages[2]));
assertEquals(Level.ERROR.toString(), logEntries.get(2).getLevel());
}
@Test
public void readLogEntriesFromDiskSetTimestampCorrectly() throws InterruptedException {
log = new LogImpl(logfile);
Logger testLogger = log.getLogger(getClass());
testLogger.info("testing timestamps now");
long expected = log.getLogEntries(0).get(0).getTimestamp();
Log testLog = new LogImpl(logfile);
long result = testLog.getLogEntries(0).get(0).getTimestamp();
// the delta is necessary because in rare cases there was an offset of 1ms
// this offset is acceptable and not worth fixing
assertEquals(expected, result, 1);
}
@Test
public void logReadsLogfileWithIllegalLogsAndIgnoresThem() throws IOException {
PrintWriter pw = new PrintWriter(new BufferedWriter(new FileWriter(logfile)));
pw.write("log level which does not adhere to logging format");
log = new LogImpl(logfile);
List<LogEntry> entries = log.getLogEntries(0);
assertEquals(0, entries.size());
}
@Test
public void logReadsLogfileWithStacktraces() {
log = new LogImpl(logfile);
Logger testLogger = log.getLogger(getClass());
try {
throw new ArithmeticException();
} catch (ArithmeticException e) {
testLogger.error("printing stacktrace to log", e);
}
Log readFromDiskLog = new LogImpl(logfile);
List<LogEntry> logEntries = readFromDiskLog.getLogEntries(0);
for (int index = 0; index < logEntries.size(); index++) {
LogEntry entry = logEntries.get(index);
assertNotEquals(0, entry.getTimestamp());
assertEquals(index, entry.getIndex());
assertEquals(Level.ERROR.toString(), entry.getLevel());
}
}
}
|
package net.java.sip.communicator.impl.gui.main.contactlist.contactsource;
import java.util.*;
import java.util.regex.*;
import net.java.sip.communicator.impl.gui.*;
import net.java.sip.communicator.service.contactlist.*;
import net.java.sip.communicator.service.contactsource.*;
import net.java.sip.communicator.service.protocol.*;
/**
* The <tt>ProtocolContactSourceServiceImpl</tt>
*
* @author Yana Stamcheva
*/
public class ProtocolContactSourceServiceImpl
implements ContactSourceService
{
/**
* The protocol provider, providing the contacts.
*/
private final ProtocolProviderService protocolProvider;
/**
* The operation set class, we use to filter the capabilities of the
* contacts.
*/
private final Class<? extends OperationSet> opSetClass;
/**
* The <tt>MetaContactListService</tt>, providing the meta contact list.
*/
MetaContactListService metaContactListService
= GuiActivator.getContactListService();
/**
* The <tt>List</tt> of <tt>ProtocolContactQuery</tt> instances
* which have been started and haven't stopped yet.
*/
private final List<ProtocolCQuery> queries
= new LinkedList<ProtocolCQuery>();
/**
* Creates an instance of <tt>ProtocolContactSourceServiceImpl</tt>.
*
* @param protocolProvider the protocol provider which is the contact source
* @param opSetClass the <tt>OperationSet</tt> class that is supported by
* source contacts
*/
public ProtocolContactSourceServiceImpl(
ProtocolProviderService protocolProvider,
Class<? extends OperationSet> opSetClass)
{
this.protocolProvider = protocolProvider;
this.opSetClass = opSetClass;
}
/**
* Returns the type of this contact source.
*
* @return the type of this contact source
*/
public int getType()
{
return DEFAULT_TYPE;
}
/**
* Returns a user-friendly string that identifies this contact source.
*
* @return the display name of this contact source
*/
public String getDisplayName()
{
return GuiActivator.getResources().getI18NString("service.gui.CONTACTS")
+ " " + protocolProvider.getAccountID().getDisplayName();
}
/**
* Creates a query for the given <tt>queryString</tt>.
*
* @param queryString the string to search for
* @return the created query
*/
public ContactQuery createContactQuery(String queryString)
{
return createContactQuery(queryString, -1);
}
/**
* Creates a query for the given <tt>queryString</tt>.
*
* @param queryString the string to search for
* @param contactCount the maximum count of result contacts
* @return the created query
*/
public ContactQuery createContactQuery( String queryString,
int contactCount)
{
if (queryString == null)
queryString = "";
ProtocolCQuery contactQuery
= new ProtocolCQuery(queryString, contactCount);
synchronized (queries)
{
queries.add(contactQuery);
}
return contactQuery;
}
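/*
 * Usage sketch (added, hypothetical): a caller would typically create the query, register a
 * listener for incremental results and then start it, e.g.
 *
 *   ContactQuery query = protocolContactSource.createContactQuery("john", 20);
 *   query.addContactQueryListener(myListener);
 *   ((ProtocolCQuery) query).start();
 *
 * start() (overridden below) removes the query from the internal list again if starting
 * fails; the actual matching happens asynchronously in ProtocolCQuery.run().
 */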
/**
* Removes query from the list.
* @param contactQuery the query
*/
public void removeQuery(ContactQuery contactQuery)
{
// notify() must be called while holding the monitor of queries,
// the same lock used when adding queries in createContactQuery()
synchronized (queries)
{
if (queries.remove(contactQuery))
queries.notify();
}
}
/**
* The <tt>ProtocolCQuery</tt> performing the query for this contact source.
*/
private class ProtocolCQuery
extends AsyncContactQuery<ProtocolContactSourceServiceImpl>
{
/**
* The maximum number of contacts to return as result.
*/
private int contactCount;
/**
* The query string used for filtering the results.
*/
private final String queryString;
/**
* Creates an instance of <tt>ProtocolCQuery</tt>.
*
* @param queryString the query string
* @param contactCount the maximum number of contacts to return as
* result
*/
public ProtocolCQuery(String queryString, int contactCount)
{
super(ProtocolContactSourceServiceImpl.this,
Pattern.compile(queryString, Pattern.CASE_INSENSITIVE
| Pattern.LITERAL), true);
this.queryString = queryString;
this.contactCount = contactCount;
}
/**
* {@inheritDoc}
*
* Always returns <tt>false</tt>.
*/
@Override
protected boolean phoneNumberMatches(String phoneNumber)
{
return false;
}
@Override
public void run()
{
Iterator<MetaContact> contactListIter
= metaContactListService.findAllMetaContactsForProvider(
protocolProvider);
while (contactListIter.hasNext())
{
MetaContact metaContact = contactListIter.next();
if (getStatus() == QUERY_CANCELED)
return;
this.addResultContact(metaContact);
}
if (getStatus() != QUERY_CANCELED)
setStatus(QUERY_COMPLETED);
}
@Override
public synchronized void start()
{
boolean queryHasStarted = false;
try
{
super.start();
queryHasStarted = true;
}
finally
{
if (!queryHasStarted)
{
getContactSource().removeQuery(this);
}
}
}
/**
* Adds the result for the given group.
*
* @param metaContact the metaContact, which child protocol contacts
* we'll be adding to the result
*/
private void addResultContact(MetaContact metaContact)
{
Iterator<Contact> contacts
= metaContact.getContactsForProvider(protocolProvider);
while (contacts.hasNext())
{
if (getStatus() == QUERY_CANCELED)
return;
if(contactCount > 0 && getQueryResultCount() > contactCount)
break;
Contact contact = contacts.next();
String contactAddress = contact.getAddress();
String contactDisplayName = contact.getDisplayName();
String queryLowerCase = queryString.toLowerCase();
if (queryString == null
|| queryString.length() <= 0
|| metaContact.getDisplayName().toLowerCase().contains(
queryLowerCase)
|| contactAddress.toLowerCase().contains(queryLowerCase)
|| contactDisplayName.toLowerCase().contains(queryLowerCase))
{
ContactDetail contactDetail
= new ContactDetail(contactAddress);
List<Class<? extends OperationSet>> supportedOpSets
= new ArrayList<Class<? extends OperationSet>>();
supportedOpSets.add(opSetClass);
contactDetail.setSupportedOpSets(supportedOpSets);
List<ContactDetail> contactDetails
= new ArrayList<ContactDetail>();
contactDetails.add(contactDetail);
SortedGenericSourceContact sourceContact
= new SortedGenericSourceContact(
this,
ProtocolContactSourceServiceImpl.this,
contactDisplayName,
contactDetails);
if (!contactAddress.equals(contactDisplayName))
sourceContact.setDisplayDetails(contactAddress);
sourceContact.setImage(metaContact.getAvatar());
sourceContact.setPresenceStatus(
contact.getPresenceStatus());
sourceContact.setContactAddress(contactAddress);
addQueryResult(sourceContact);
}
}
}
}
/**
* Returns the index of the contact source in the result list.
*
* @return the index of the contact source in the result list
*/
public int getIndex()
{
return 1;
}
}
|
package org.springframework.cloud.deployer.spi.openshift.maven;
import static org.junit.Assert.assertNotNull;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import org.apache.maven.project.MavenProject;
import org.junit.Ignore;
import org.junit.Test;
import org.springframework.cloud.deployer.resource.maven.MavenProperties;
import org.springframework.cloud.deployer.resource.maven.MavenResource;
public class MavenResourceProjectExtractorTest {
private MavenResourceProjectExtractor mavenResourceProjectExtractor = new MavenResourceProjectExtractor();
@Test
@Ignore("The artifact is not resolvable on the Travis build. Need to fix.")
public void extractMavenProject() throws Exception {
copy("src/test/resources/test-app-1.0-SNAPSHOT.pom",
"target/.m2/repository/org/test/test-app/1.0-SNAPSHOT");
copy("src/test/resources/test-app-1.0-SNAPSHOT.jar",
"target/.m2/repository/org/test/test-app/1.0-SNAPSHOT");
System.setProperty("user.home", new File("target").getAbsolutePath());
MavenProject mavenProject = mavenResourceProjectExtractor.extractMavenProject(
MavenResource.parse("org.test:test-app:1.0-SNAPSHOT"),
new MavenProperties());
assertNotNull(mavenProject);
}
private void copy(String from, String to) throws IOException {
Path source = new File(from).toPath().toAbsolutePath();
Path destination = new File(to).toPath().toAbsolutePath();
if (!Files.exists(destination)) {
Files.createDirectories(destination);
}
Files.copy(source,
new File(destination.toAbsolutePath().toString(),
source.getFileName().toString()).toPath(),
StandardCopyOption.REPLACE_EXISTING);
}
}
|
package org.jgroups.protocols;
import org.jgroups.*;
import org.jgroups.annotations.*;
import org.jgroups.blocks.LazyRemovalSet;
import org.jgroups.stack.IpAddress;
import org.jgroups.stack.Protocol;
import org.jgroups.util.*;
import org.jgroups.util.UUID;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.*;
@Experimental
@MBean(description="RELAY protocol")
public class RELAY extends Protocol {
@Property(description="Properties of the bridge cluster (e.g. tcp.xml)")
protected String bridge_props=null;
@Property(description="Name of the bridge cluster")
protected String bridge_name="bridge-cluster";
// @Property(description="If true, messages are relayed asynchronously, ie. via submission of a task to the timer thread pool")
// protected boolean async=false;
@Property(description="If set to false, don't perform relaying. Used e.g. for backup clusters; " +
"unidirectional replication from one cluster to another, but not back. Can be changed at runtime")
protected boolean relay=true;
@Property(description="Drops views received from below and instead generates global views and passes them up. " +
"A global view consists of the local view and the remote view, ordered by view ID. If true, no protocol" +
"which requires (local) views can sit on top of RELAY")
protected boolean present_global_views=true;
@Property(description="Max size of the cache for remote addresses")
protected int cache_size=200;
protected Address local_addr;
@ManagedAttribute
protected volatile boolean is_coord=false;
protected volatile Address coord=null;
/** The bridge between the two local clusters, usually based on a TCP config */
protected JChannel bridge;
/** The view of the local cluster */
protected View local_view;
/** The view of the bridge cluster, usually consists of max 2 nodes */
protected View bridge_view;
/** The view of the remote cluster, typically all members are ProxyAddresses */
protected View remote_view;
/** The combined view of local and remote cluster */
protected View global_view;
/** To generate new global views */
protected long global_view_id=0;
protected TimeScheduler timer;
protected LazyRemovalSet<Address> remote_cache;
@ManagedOperation
public void setRelay(boolean relay) {
this.relay=relay;
}
@ManagedAttribute
public String getLocalView() {
return local_view != null? local_view.toString() : "n/a";
}
@ManagedAttribute
public String getBridgeView() {
return bridge_view != null? bridge_view.toString() : "n/a";
}
@ManagedAttribute
public String getRemoteView() {
return remote_view != null? remote_view.toString() : "n/a";
}
@ManagedAttribute
public String getGlobalView() {
return global_view != null? global_view.toString() : "n/a";
}
@ManagedOperation(description="Prints the contents of the remote cache")
public String printRemoteCache() {
return remote_cache.printCache(new LazyRemovalSet.Printable<Address>() {
public String print(Address val) {
return val.toString();
}
});
}
@ManagedOperation(description="Evicts all elements in the remote cache which are marked as removable")
public void evictRemoteCache() {
remote_cache.removeMarkedElements(false);
}
public void init() throws Exception {
super.init();
timer=getTransport().getTimer();
remote_cache=new LazyRemovalSet<Address>(cache_size, 300000);
}
public void stop() {
Util.close(bridge);
}
public Object down(Event evt) {
switch(evt.getType()) {
case Event.MSG:
Message msg=(Message)evt.getArg();
Address dest=msg.getDest();
// forward non local destinations to the coordinator, to relay to the remote cluster
// if(dest instanceof ProxyAddress || !local_view.containsMember(dest)) {
if(remote_cache.contains(dest)) {
forwardToCoord(msg);
return null;
}
break;
case Event.VIEW_CHANGE:
handleView((View)evt.getArg());
break;
case Event.DISCONNECT:
Util.close(bridge);
break;
case Event.SET_LOCAL_ADDRESS:
local_addr=(Address)evt.getArg();
break;
case Event.GET_PHYSICAL_ADDRESS:
// fix to prevent exception by JBossAS, which checks whether a physical
// address is present and throw an ex if not
// Remove this when the AS code removes that check
PhysicalAddress addr=(PhysicalAddress)down_prot.down(evt);
if(addr == null)
addr=new IpAddress(6666);
return addr;
}
return down_prot.down(evt);
}
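/*
 * Note (added): for a unicast to a remote member, the code above and below combine roughly as
 * follows (member names are hypothetical). Member B in cluster 1 sends to the proxy address X'
 * of remote member X; down() finds X' in remote_cache and calls forwardToCoord(), which wraps
 * the serialized message in a FORWARD header and unicasts it to the local coordinator A.
 * A's up() sees FORWARD and relays the bytes over the bridge channel via forward(); on the
 * other side, Receiver.receive() gets FORWARD and calls sendOnLocalCluster(), which re-injects
 * the message into cluster 2 with a DISSEMINATE header carrying the original (proxied) sender.
 */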
public Object up(Event evt) {
switch(evt.getType()) {
case Event.MSG:
Message msg=(Message)evt.getArg();
Address dest=msg.getDest();
RelayHeader hdr=(RelayHeader)msg.getHeader(getId());
if(hdr != null) {
switch(hdr.type) {
case DISSEMINATE:
Message copy=msg.copy();
if(hdr.original_sender != null)
copy.setSrc(hdr.original_sender);
return up_prot.up(new Event(Event.MSG, copy));
case FORWARD:
if(is_coord)
forward(msg.getRawBuffer(), msg.getOffset(), msg.getLength());
break;
case VIEW:
return installView(msg.getRawBuffer(), msg.getOffset(), msg.getLength());
case BROADCAST_VIEW:
sendViewOnLocalCluster(msg.getSrc(), remote_view, global_view, false);
break;
default:
throw new IllegalArgumentException(hdr.type + " is not a valid type");
}
return null;
}
if(is_coord && relay && (dest == null || dest.isMulticastAddress()) && !msg.isFlagSet(Message.NO_RELAY)) {
Message tmp=msg.copy(true, Global.BLOCKS_START_ID); // we only copy headers from building blocks
try {
byte[] buf=Util.streamableToByteBuffer(tmp);
forward(buf, 0, buf.length);
}
catch(Exception e) {
log.warn("failed relaying message", e);
}
}
break;
case Event.VIEW_CHANGE:
handleView((View)evt.getArg()); // already sends up new view if needed
if(present_global_views)
return null;
else
break;
}
return up_prot.up(evt);
}
protected void handleView(final View view) {
local_view=view;
coord=view.getMembers().firstElement();
boolean create_bridge=false;
boolean is_new_coord=Util.isCoordinator(view, local_addr);
if(is_coord) {
if(!is_new_coord) {
if(log.isTraceEnabled())
log.trace("I'm not coordinator anymore, closing the channel");
Util.close(bridge);
is_coord=false;
bridge=null;
}
}
else {
if(is_new_coord)
is_coord=create_bridge=true;
}
if(is_coord) {
if(create_bridge) {
createBridge();
Message msg=new Message();
msg.putHeader(id, RelayHeader.create(RELAY.RelayHeader.Type.BROADCAST_VIEW));
try {
bridge.send(msg);
}
catch(Exception e) {
}
}
sendViewToRemote(ViewData.create(view, null), false);
if(create_bridge && bridge.getView().size() > 1) {
;
}
else {
sendViewOnLocalCluster(null, remote_view, generateGlobalView(view, remote_view), true);
}
}
}
protected Object installView(byte[] buf, int offset, int length) {
try {
ViewData data=(ViewData)Util.streamableFromByteBuffer(ViewData.class, buf, offset, length);
if(data.uuids != null)
UUID.add(data.uuids);
remote_view=data.remote_view;
if(data.remote_view != null) {
remote_cache.add(data.remote_view.getMembers());
remote_cache.retainAll(data.remote_view.getMembers());
}
if(global_view == null || (data.global_view != null &&!global_view.equals(data.global_view))) {
global_view=data.global_view;
synchronized(this) {
if(data.global_view.getVid().getId() > global_view_id)
global_view_id=data.global_view.getViewId().getId();
}
if(present_global_views)
return up_prot.up(new Event(Event.VIEW_CHANGE, global_view));
}
}
catch(Exception e) {
log.error("failed installing view", e);
}
return null;
}
/** Forwards the message across the TCP link to the other local cluster */
protected void forward(byte[] buffer, int offset, int length) {
Message msg=new Message(null, null, buffer, offset, length);
msg.putHeader(id, new RelayHeader(RelayHeader.Type.FORWARD));
if(bridge != null) {
try {
bridge.send(msg);
}
catch(Throwable t) {
log.error("failed forwarding message over bridge", t);
}
}
}
/** Wraps the message and sends it to the current coordinator */
protected void forwardToCoord(Message msg) {
Message tmp=msg.copy(true, Global.BLOCKS_START_ID); // we only copy headers from building blocks
if(tmp.getSrc() == null)
tmp.setSrc(local_addr);
Address dest=tmp.getDest();
if(dest instanceof ProxyAddress) {
ProxyAddress dst=(ProxyAddress)tmp.getDest();
tmp.setDest(dst.getOriginalAddress());
}
try {
byte[] buf=Util.streamableToByteBuffer(tmp);
if(coord != null) {
tmp=new Message(coord, null, buf, 0, buf.length); // reusing tmp is OK here ...
tmp.putHeader(id, new RelayHeader(RelayHeader.Type.FORWARD));
down_prot.down(new Event(Event.MSG, tmp));
}
}
catch(Exception e) {
log.error("failed forwarding unicast message to coord", e);
}
}
protected void sendViewToRemote(ViewData view_data, boolean use_seperate_thread) {
try {
if(bridge != null && bridge.isConnected()) {
byte[] buf=Util.streamableToByteBuffer(view_data);
final Message msg=new Message(null, null, buf);
msg.putHeader(id, RelayHeader.create(RelayHeader.Type.VIEW));
if(use_seperate_thread) {
timer.execute(new Runnable() {
public void run() {
try {
bridge.send(msg);
}
catch(Exception e) {
log.error("failed sending view to remote", e);
}
}
});
}
else
bridge.send(msg);
}
}
catch(Exception e) {
log.error("failed sending view to remote", e);
}
}
protected View generateGlobalView(View local_view, View remote_view) {
List<View> views=new ArrayList<View>(2);
if(local_view != null) views.add(local_view);
if(remote_view != null) views.add(remote_view);
Collections.sort(views, new Comparator<View>() {
public int compare(View v1, View v2) {
ViewId vid1=v1.getViewId(), vid2=v2.getViewId();
Address creator1=vid1.getCoordAddress(), creator2=vid2.getCoordAddress();
int rc=creator1.compareTo(creator2);
if(rc != 0)
return rc;
long id1=vid1.getId(), id2=vid2.getId();
return id1 > id2 ? 1 : id1 < id2? -1 : 0;
}
});
Collection<Address> combined_members=new LinkedList<Address>();
for(View view: views)
combined_members.addAll(view.getMembers());
long new_view_id;
synchronized(this) {
new_view_id=global_view_id++;
}
return new View(local_addr, new_view_id, combined_members);
}
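/*
 * Worked example (added, values are hypothetical): with a local view A|4={A,B} and a remote
 * view X|7={X',Y'} (proxy addresses), the two views are sorted by their ViewId (creator first,
 * then id), the member lists are concatenated in that order, and the result is wrapped in a
 * new View created by this node with the next global_view_id, e.g.
 * local_addr|8={A,B,X',Y'} or local_addr|8={X',Y',A,B}, depending on how the ViewIds compare.
 */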
protected void createBridge() {
try {
if(log.isTraceEnabled())
log.trace("I'm the coordinator, creating a channel (props=" + bridge_props + ", cluster_name=" + bridge_name + ")");
bridge=new JChannel(bridge_props);
bridge.setOpt(Channel.LOCAL, false); // don't receive my own messages
bridge.setReceiver(new Receiver());
bridge.connect(bridge_name);
}
catch(ChannelException e) {
log.error("failed creating bridge channel (props=" + bridge_props + ")", e);
}
}
protected class Receiver extends ReceiverAdapter {
public void receive(Message msg) {
Address sender=msg.getSrc();
if(bridge.getAddress().equals(sender)) // discard my own messages
return;
RelayHeader hdr=(RelayHeader)msg.getHeader(id);
switch(hdr.type) {
case DISSEMINATE: // should not occur here, but we'll ignore it anyway
break;
case FORWARD:
sendOnLocalCluster(msg.getRawBuffer(), msg.getOffset(), msg.getLength());
break;
case VIEW:
try {
ViewData data=(ViewData)Util.streamableFromByteBuffer(ViewData.class, msg.getRawBuffer(),
msg.getOffset(), msg.getLength());
// replace addrs with proxies
if(data.remote_view != null) {
List<Address> mbrs=new LinkedList<Address>();
for(Address mbr: data.remote_view.getMembers()) {
mbrs.add(new ProxyAddress(local_addr, mbr));
}
data.remote_view=new View(data.remote_view.getViewId(), mbrs);
}
data.global_view=generateGlobalView(local_view, data.remote_view);
sendViewOnLocalCluster(null, data, false);
}
catch(Exception e) {
log.error("failed unmarshalling view from remote cluster", e);
}
break;
case BROADCAST_VIEW: // no-op
// our local view is seen as the remote view on the other side !
sendViewToRemote(ViewData.create(local_view, null), true);
break;
default:
throw new IllegalArgumentException(hdr.type + " is not a valid type");
}
}
public void viewAccepted(View view) {
if(bridge_view == null)
bridge_view=view;
else {
if(!bridge_view.getVid().equals(view.getViewId())) {
bridge_view=view;
if(view.size() == 1 && bridge != null && bridge.isConnected() &&
view.getMembers().firstElement().equals(bridge.getAddress())) {
remote_view=null;
View new_global_view=generateGlobalView(local_view, null);
sendViewOnLocalCluster(null, null, new_global_view, false);
}
else {
// our local view is seen as the remote view on the other side !
// sendViewToRemote(ViewData.create(local_view, null), true);
}
}
}
}
}
protected void sendOnLocalCluster(byte[] buf, int offset, int length) {
try {
Message msg=(Message)Util.streamableFromByteBuffer(Message.class, buf, offset, length);
Address sender=msg.getSrc();
ProxyAddress proxy_sender=new ProxyAddress(local_addr, sender);
// msg.setSrc(proxy_sender);
// set myself to be the sender
msg.setSrc(local_addr);
// later, in RELAY, we'll take the proxy_sender from the header and make it the sender
msg.putHeader(id, RelayHeader.createDisseminateHeader(proxy_sender));
if(log.isTraceEnabled())
log.trace("received msg from " + sender + ", passing down the stack with dest=" +
msg.getDest() + " and src=" + msg.getSrc());
down_prot.down(new Event(Event.MSG, msg));
}
catch(Exception e) {
log.error("failed sending on local cluster", e);
}
}
protected void sendViewOnLocalCluster(Address dest, View remote_view, View global_view, boolean use_seperate_thread) {
sendViewOnLocalCluster(dest, ViewData.create(remote_view, global_view), use_seperate_thread);
}
protected void sendViewOnLocalCluster(Address dest, ViewData data, boolean use_seperate_thread) {
try {
final Message view_msg=new Message(dest, null, Util.streamableToByteBuffer(data));
view_msg.putHeader(id, RelayHeader.create(RelayHeader.Type.VIEW));
if(use_seperate_thread) {
timer.execute(new Runnable() {
public void run() {
down_prot.down(new Event(Event.MSG, view_msg));
}
});
}
else
down_prot.down(new Event(Event.MSG, view_msg));
}
catch(Exception e) {
log.error("failed sending view to local cluster", e);
}
}
public static class RelayHeader extends Header {
public static enum Type {DISSEMINATE, FORWARD, VIEW, BROADCAST_VIEW};
protected Type type;
protected Address original_sender; // with DISSEMINATE
public RelayHeader() {
}
private RelayHeader(Type type) {
this.type=type;
}
public static RelayHeader create(Type type) {
return new RelayHeader(type);
}
public static RelayHeader createDisseminateHeader(Address original_sender) {
RelayHeader retval=new RelayHeader(Type.DISSEMINATE);
retval.original_sender=original_sender;
return retval;
}
public int size() {
int retval=Global.BYTE_SIZE; // type
switch(type) {
case DISSEMINATE:
retval+=Util.size(original_sender);
break;
case FORWARD:
case VIEW:
case BROADCAST_VIEW:
break;
}
return retval;
}
public void writeTo(DataOutputStream out) throws IOException {
out.writeByte(type.ordinal());
switch(type) {
case DISSEMINATE:
Util.writeAddress(original_sender, out);
break;
case FORWARD:
case VIEW:
case BROADCAST_VIEW:
break;
}
}
public void readFrom(DataInputStream in) throws IOException, IllegalAccessException, InstantiationException {
type=Type.values()[in.readByte()];
switch(type) {
case DISSEMINATE:
original_sender=Util.readAddress(in);
break;
case FORWARD:
case VIEW:
case BROADCAST_VIEW:
break;
}
}
public String toString() {
StringBuilder sb=new StringBuilder(type.toString());
switch(type) {
case DISSEMINATE:
sb.append(" (original sender=" + original_sender + ")");
break;
case FORWARD:
case VIEW:
case BROADCAST_VIEW:
break;
}
return sb.toString();
}
}
/** Contains local and remote views, and UUID information */
protected static class ViewData implements Streamable {
protected View remote_view;
protected View global_view;
protected Map<Address,String> uuids;
public ViewData() {
}
private ViewData(View remote_view, View global_view, Map<Address,String> uuids) {
this.remote_view=remote_view;
this.global_view=global_view;
this.uuids=uuids;
}
public static ViewData create(View remote_view, View global_view) {
Map<Address,String> tmp=UUID.getContents();
View rv=remote_view != null? remote_view.copy() : null;
View gv=global_view != null? global_view.copy() : null;
return new ViewData(rv, gv, tmp);
}
public void writeTo(DataOutputStream out) throws IOException {
Util.writeStreamable(remote_view, out);
Util.writeStreamable(global_view, out);
out.writeInt(uuids.size());
for(Map.Entry<Address,String> entry: uuids.entrySet()) {
Util.writeAddress(entry.getKey(), out);
out.writeUTF(entry.getValue());
}
}
public void readFrom(DataInputStream in) throws IOException, IllegalAccessException, InstantiationException {
remote_view=(View)Util.readStreamable(View.class, in);
global_view=(View)Util.readStreamable(View.class, in);
int size=in.readInt();
uuids=new HashMap<Address,String>();
for(int i=0; i < size; i++) {
Address addr=Util.readAddress(in);
String name=in.readUTF();
uuids.put(addr, name);
}
}
public String toString() {
StringBuilder sb=new StringBuilder();
sb.append("global_view: " + global_view).append(", remote_view: ").append(remote_view);
return sb.toString();
}
}
}
|
//package goplaces.selenium;
//import static org.junit.Assert.*;
//import java.io.File;
//import java.io.IOException;
//import org.apache.commons.io.FileUtils;
//import org.junit.After;
//import org.junit.Before;
//import org.junit.Test;
//import org.openqa.selenium.By;
//import org.openqa.selenium.OutputType;
//import org.openqa.selenium.TakesScreenshot;
//import org.openqa.selenium.WebDriver;
//import org.openqa.selenium.WebElement;
//import org.openqa.selenium.chrome.ChromeDriver;
//import com.thoughtworks.selenium.ScreenshotListener;
//public class GoPlacesSeleniumTest {
// private String baseUrl;
// private WebDriver driver;
// private ScreenshotHelper screenshotHelper;
// @Before
// public void openBrowser() {
// System.setProperty("webdriver.chrome.driver", "chromedriver");
// driver = new ChromeDriver();
// screenshotHelper = new ScreenshotHelper();
// }
// @After
// public void saveScreenshotAndCloseBrowser() throws IOException {
//// screenshotHelper.saveScreenshot("screenshot.png");
// driver.quit();
// }
// @Test
// public void pageTitleAfterSearchShouldBeginWithDrupal() throws InterruptedException {
// assertEquals("Go Places", driver.getTitle());
// WebElement findInitialRouteTitle = driver.findElement(By.cssSelector(".places-form h2"));
// assertEquals("1. Find the initial route", findInitialRouteTitle.getText());
// WebElement insertWaypointsTitle = driver.findElement(By.cssSelector(".waypoints-form h2"));
// assertEquals("2. Insert the waypoint categories", insertWaypointsTitle.getText());
// WebElement googleMap = driver.findElement(By.cssSelector("#map .gm-style"));
// assertTrue(googleMap.isDisplayed());
// WebElement originPlace = driver.findElement(By.cssSelector(".js-places-form-origin"));
// assertEquals("Origin place", originPlace.getAttribute("placeholder"));
// WebElement destinationPlace = driver.findElement(By.cssSelector(".js-places-form-destination"));
// assertEquals("Destination place", destinationPlace.getAttribute("placeholder"));
// originPlace.sendKeys("6636 Del Playa dr, Isla Vista");
// destinationPlace.sendKeys("Santa Barbara");
// WebElement findInitialRouteSubmitBtn = driver.findElement(By.cssSelector(".js-places-form-submit"));
// findInitialRouteSubmitBtn.click();
// Thread.sleep(5000);
// WebElement firstWaypointCategoryTextField = driver.findElement(By.cssSelector(".waypoints-form .js-waypoint-form-textfield"));
// WebElement waypointCategoriesSubmitBtm = driver.findElement(By.cssSelector(".js-waypoints-form-submit-btn"));
// waypointCategoriesSubmitBtm.click();
//// Thread.sleep(10000);
// // We should assert presence of google map markers here
// // Find a way to retrieve the css selectors and interact with them
// // Then we have to test the presence of the submit btn and click on it to submit the finalized route
// }
// private class ScreenshotHelper {
// public void saveScreenshot(String screenshotFileName) throws IOException {
// File screenshot = ((TakesScreenshot) driver).getScreenshotAs(OutputType.FILE);
// FileUtils.copyFile(screenshot, new File(screenshotFileName));
// }
// }
// }
|
// $Id: TOTAL.java,v 1.2 2003/12/04 13:33:53 igeorg Exp $
package org.jgroups.protocols;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.*;
import org.jgroups.*;
import org.jgroups.util.*;
import org.jgroups.stack.*;
import org.jgroups.log.Trace;
/**
* Implements the total ordering layer using a message sequencer
* <p>
*
* The protocol guarantees that all bcast sent messages will be delivered in
* the same order to all members. For that it uses a sequencer which assigns a
* monotonically increasing sequence ID to broadcasts. Then all group members
* deliver the bcasts in ascending sequence ID order.
* <p>
* <ul>
* <li>
* When a bcast message comes down to this layer, it is placed in the pending
* down queue. A bcast request is sent to the sequencer.</li>
* <li>
* When the sequencer receives a bcast request, it creates a bcast reply
* message and assigns to it a monotonically increasing seqID and sends it back
* to the source of the bcast request.</li>
* <li>
* When a broadcast reply is received, the corresponding bcast message is
* assigned the received seqID. Then it is broadcasted.</li>
* <li>
* Received bcasts are placed in the up queue. The queue is sorted according
* to the seqID of the bcast. Any message at the head of the up queue with a
* seqID equal to the next expected seqID is delivered to the layer above.</li>
* <li>
* Unicast messages coming from the layer below are forwarded above.</li>
* <li>
* Unicast messages coming from the layer above are forwarded below.</li>
* </ul>
* <p>
* <i>Please note that once a <code>BLOCK_OK</code> is acknowledged messages
* coming from above are discarded!</i> Either the application must stop
* sending messages when a <code>BLOCK</code> event is received from the
* channel or a QUEUE layer should be placed above this one. Received messages
* are still delivered above though.
* <p>
* bcast requests are retransmitted periodically until a bcast reply is
* received. If a BCAST_REP is already on its way while a BCAST_REQ is being
* retransmitted, the next BCAST_REP will refer to a non-existent
* BCAST_REQ. In that case a null BCAST message is sent to fill the resulting gap in
* the seqIDs of all members.
*
* @author i.georgiadis@doc.ic.ac.uk
*/
public class TOTAL extends Protocol {
/**
* The header processed by the TOTAL layer and intended for TOTAL
* inter-stack communication
*/
public static class Header extends org.jgroups.Header {
// Header types
/** Null value for the tag */
public static final int NULL_TYPE = -1;
/** Request to broadcast by the source */
public static final int REQ = 0;
/** Reply to broadcast request. */
public static final int REP = 1;
/** Unicast message */
public static final int UCAST = 2;
/** Broadcast Message */
public static final int BCAST = 3;
/** The header's type tag */
public int type;
/**
* The ID used by the message source to match replies from the
* sequencer
*/
public long localSeqID;
/** The ID imposing the total order of messages */
public long seqID;
/**
* used for externalization
*/
public Header() {}
public Header(int type, long localSeqID, long seqID) {
super();
switch(type) {
case REQ:
case REP:
case UCAST:
case BCAST: this.type = type; break;
default:
this.type = NULL_TYPE;
throw new IllegalArgumentException("type");
}
this.localSeqID = localSeqID;
this.seqID = seqID;
}
/**
* For debugging purposes
*/
public String toString() {
StringBuffer buffer = new StringBuffer();
String typeName;
buffer.append("[TOTAL.Header");
switch(type) {
case REQ: typeName = "REQ"; break;
case REP: typeName = "REP"; break;
case UCAST: typeName = "UCAST"; break;
case BCAST: typeName = "BCAST"; break;
case NULL_TYPE: typeName = "NULL_TYPE"; break;
default: typeName = ""; break;
}
buffer.append(", type=" + typeName);
buffer.append(", " + "localID=" + localSeqID);
buffer.append(", " + "seqID=" + seqID);
buffer.append("]");
return(buffer.toString());
}
/**
* Manual serialization
*/
public void writeExternal(ObjectOutput out) throws IOException {
out.writeInt(type);
out.writeLong(localSeqID);
out.writeLong(seqID);
}
/**
* Manual deserialization
*/
public void readExternal(ObjectInput in) throws IOException,
ClassNotFoundException {
type=in.readInt();
localSeqID=in.readLong();
seqID=in.readLong();
}
}
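/*
 * Worked example (added, values are hypothetical) of the flow described in the class javadoc:
 * member P wants to broadcast message m. P stores m under localSeqID=5, sends a REQ header
 * (localSeqID=5) to the sequencer and schedules retransmissions. The sequencer increments its
 * sequencerSeqID to, say, 42 and answers with REP (localSeqID=5, seqID=42). P then broadcasts
 * m with a BCAST header (localSeqID=5, seqID=42). Every member buffers received BCASTs in
 * upTbl and delivers them strictly in ascending seqID order, so 42 is only passed up once 41
 * has been delivered.
 */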
/**
* The retransmission listener - It is called by the
* <code>AckSenderWindow</code> when a retransmission should occur
*/
private class Command implements AckSenderWindow.RetransmitCommand {
public Command() {}
public void retransmit(long seqNo, Message msg) {
_retransmitBcastRequest(seqNo);
}
}
/** Protocol name */
private static final String PROT_NAME = "TOTAL";
/** Property names */
private static final String TRACE_PROP = "trace";
/** Minimum time between broadcast request retransmissions */
private long MIN_RETRANSMIT_INTERVAL = 500;
/** Average time between broadcast request retransmissions */
private long[] AVG_RETRANSMIT_INTERVAL = new long[]{1000,2000,3000,4000};
/** Null value for the IDs */
private static final long NULL_ID = -1;
// Layer sending states
/** No group has been joined yet */
private static final int NULL_STATE = -1;
/** When set, all messages are sent/received */
private static final int RUN = 0;
/**
* When set, only session-specific messages are sent/received, i.e. only
* messages essential to the session's integrity
*/
private static final int FLUSH = 1;
/** No message is sent to the layer below */
private static final int BLOCK = 2;
/** The state lock allowing multiple reads or a single write */
private RWLock stateLock = new RWLock();
/** Protocol layer message-sending state */
private int state = NULL_STATE;
/** The address of this stack */
private Address addr = null;
/** The address of the sequencer */
private Address sequencerAddr = null;
/**
* The sequencer's seq ID. The ID of the most recently broadcast reply
* message
*/
private long sequencerSeqID = NULL_ID;
/**
* The local sequence ID, i.e. the ID sent with the last broadcast request
* message. This is increased with every broadcast request sent to the
* sequencer and it's used to match the requests with the sequencer's
* replies
*/
private long localSeqID = NULL_ID;
/**
* The total order sequence ID. This is the ID of the most recently
* delivered broadcast message. As the sequence IDs are increasing without
* gaps, this is used to detect missing broadcast messages
*/
private long seqID = NULL_ID;
/**
* The list of unanswered broadcast requests to the sequencer. The entries
* are stored in increasing local sequence ID, i.e. in the order they were sent.
*
* localSeqID -> Broadcast msg to be sent
*/
private SortedMap reqTbl;
/**
* The list of received broadcast messages that haven't yet been delivered
* to the layer above. The entries are stored in increasing sequence ID,
* i.e. in the order they must be delivered above
*
* seqID -> Received broadcast msg
*/
private SortedMap upTbl;
/** Retransmitter for pending broadcast requests */
private AckSenderWindow retransmitter;
/**
* Print addresses in host_ip:port form to bypass DNS
*/
private String _addrToString(Object addr) { return (
addr == null? "<null>" :
((addr instanceof org.jgroups.stack.IpAddress)?
(((org.jgroups.stack.IpAddress)addr).getIpAddress(
).getHostAddress() + ":" +
((org.jgroups.stack.IpAddress)addr).getPort()) :
addr.toString())
);
}
/** @return this protocol's name */
private String _getName() {
return(PROT_NAME);
}
/**
* Configure the protocol based on the given list of properties
*
* @param properties the list of properties to use to setup this layer
* @return false if there was any unrecognized property or a property with
* an invalid value
*/
private boolean _setProperties(Properties properties) {
String value;
// trace
// Parse & remove property but ignore it; use Trace.trace instead
value = properties.getProperty(TRACE_PROP);
if (value != null) properties.remove(TRACE_PROP);
if (properties.size() > 0) {
Trace.error("TOTAL.setProperties()", "The following properties are not " +
"recognized: " + properties.toString());
return(false);
}
return(true);
}
/**
* Events that some layer below must handle
*
* @return the set of <code>Event</code>s that must be handled by some layer
* below
*/
Vector _requiredDownServices() {
Vector services = new Vector();
return(services);
}
/**
* Events that some layer above must handle
*
* @return the set of <code>Event</code>s that must be handled by some
* layer above
*/
Vector _requiredUpServices() {
Vector services = new Vector();
return(services);
}
/**
* Extract as many messages as possible from the pending up queue and send
* them to the layer above
*/
private void _deliverBcast() {
Message msg;
Header header;
synchronized(upTbl) {
while((msg = (Message)upTbl.remove(new Long(seqID+1))) != null) {
header = (Header)msg.removeHeader(getName());
if (header.localSeqID != NULL_ID) passUp(new Event(Event.MSG, msg));
++seqID;
}
} // synchronized(upTbl)
}
/**
* Add all undelivered bcasts sent by this member in the req queue and then
* replay this queue
*/
private void _replayBcast() {
Iterator it;
Message msg, reqMsg;
Header header;
long seqID;
// i. Remove all undelivered bcasts sent by this member and place them
// again in the pending bcast req queue
synchronized(upTbl) {
if (upTbl.size() > 0)
Trace.info("TOTAL", "Replaying undelivered bcasts");
it = upTbl.entrySet().iterator();
while(it.hasNext()) {
msg = (Message)((Map.Entry)it.next()).getValue();
it.remove();
if (!msg.getSrc().equals(addr)) {
Trace.info("TOTAL", "During replay: " +
"discarding BCAST[" +
((TOTAL.Header)msg.getHeader(getName())).seqID +
"] from " + _addrToString(msg.getSrc()));
continue;
}
header = (Header)msg.removeHeader(getName());
if (header.localSeqID == NULL_ID) continue;
_sendBcastRequest(msg, header.localSeqID);
}
} // synchronized(upTbl)
}
/**
* Send a unicast message: Add a <code>UCAST</code> header
*
* @param msg the message to unicast
* @return the message to send
*/
private Message _sendUcast(Message msg) {
msg.putHeader(getName(), new Header(Header.UCAST, NULL_ID, NULL_ID));
return(msg);
}
/**
* Replace the original message with a broadcast request sent to the
* sequencer. The original bcast message is stored locally until a reply to
* bcast is received from the sequencer. This function has the side-effect
* of increasing the <code>localSeqID</code>
*
* @param msg the message to broadcast
*/
private void _sendBcastRequest(Message msg) {
_sendBcastRequest(msg, ++localSeqID);
}
/**
* Replace the original message with a broadcast request sent to the
* sequencer. The original bcast message is stored locally until a reply
* to bcast is received from the sequencer
*
* @param msg the message to broadcast
* @param id the local sequence ID to use
*/
private void _sendBcastRequest(Message msg, long id) {
Message reqMsg;
// i. Store away the message while waiting for the sequencer's reply
// ii. Send a bcast request immediately and also schedule a
// retransmission
synchronized(reqTbl) {
reqTbl.put(new Long(id), msg);
}
_transmitBcastRequest(id);
retransmitter.add(id, msg);
}
/**
* Send the bcast request with the given localSeqID
*
* @param seqID the local sequence id of the
*/
private void _transmitBcastRequest(long seqID) {
Message reqMsg;
// i. If NULL_STATE, then ignore, just transient state before
// shutting down the retransmission thread
// ii. If blocked, be patient - reschedule
// iii. If the request is not pending any more, acknowledge it
// iv. Create a broadcast request and send it to the sequencer
if (state == NULL_STATE) {
Trace.info("TOTAL", "Transmit BCAST_REQ[" + seqID + "] in NULL_STATE");
return;
}
if (state == BLOCK) return;
synchronized(reqTbl) {
if (!reqTbl.containsKey(new Long(seqID))) {
retransmitter.ack(seqID);
return;
}
}
reqMsg = new Message(sequencerAddr, addr, new byte[0]);
reqMsg.putHeader(getName(), new Header(Header.REQ, seqID, NULL_ID));
passDown(new Event(Event.MSG, reqMsg));
}
/**
* Receive a unicast message: Remove the <code>UCAST</code> header
*
* @param msg the received unicast message
*/
private void _recvUcast(Message msg) {
msg.removeHeader(getName());
}
/**
* Receive a broadcast message: Put it in the pending up queue and then
* try to deliver above as many messages as possible
*
* @param msg the received broadcast message
*/
private void _recvBcast(Message msg) {
Header header = (Header)msg.getHeader(getName());
// i. Put the message in the up pending queue only if it's not
// already there, as it seems that the event may be received
// multiple times before a view change when all members are
// negotiating a common set of stable msgs
// ii. Deliver as many messages as possible
synchronized(upTbl) {
if (header.seqID <= seqID)
return;
upTbl.put(new Long(header.seqID), msg);
}
_deliverBcast();
}
/**
* Received a bcast request - Ignore if not the sequencer, else send a
* bcast reply
*
* @param msg the broadcast request message
*/
private void _recvBcastRequest(Message msg) {
Header header;
Message repMsg;
// i. If blocked, discard the bcast request
// ii. Assign a seqID to the message and send it back to the requestor
if (!addr.equals(sequencerAddr)) {
Trace.error("TOTAL", "Received bcast request " +
"but not a sequencer");
return;
}
if (state == BLOCK) {
Trace.info("TOTAL", "Blocked, discard bcast req");
return;
}
header = (Header)msg.getHeader(getName());
++sequencerSeqID;
repMsg = new Message(msg.getSrc(), addr, new byte[0]);
repMsg.putHeader(getName(), new Header(Header.REP, header.localSeqID,
sequencerSeqID));
passDown(new Event(Event.MSG, repMsg));
}
/**
* Received a bcast reply - Match with the pending bcast request and move
* the message in the list of messages to be delivered above
*
* @param header the header of the bcast reply
*/
private void _recvBcastReply(Header header) {
Message msg;
long id;
// i. If blocked, discard the bcast reply
// ii. Assign the received seqID to the message and broadcast it
// iii.
// - Acknowledge the message to the retransmitter
// - If non-existent BCAST_REQ, send a fake bcast to avoid seqID gaps
// - If localID == NULL_ID, it's a null BCAST, else normal BCAST
// - Set the seq ID of the message to the one sent by the sequencer
if (state == BLOCK) {
Trace.info("TOTAL", "Blocked, discard bcast rep");
return;
}
synchronized(reqTbl) {
msg = (Message)reqTbl.remove(new Long(header.localSeqID));
}
if (msg != null) {
retransmitter.ack(header.localSeqID);
id = header.localSeqID;
} else {
Trace.info("TOTAL", "Bcast reply to " +
"non-existent BCAST_REQ[" + header.localSeqID +
"], Sending NULL bcast");
id = NULL_ID;
msg = new Message(null, addr, new byte[0]);
}
msg.putHeader(getName(), new Header(Header.BCAST, id, header.seqID));
passDown(new Event(Event.MSG, msg));
}
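/*
 * Summary of the broadcast path implemented by the handlers above (added for
 * clarity; derived from the code, not part of the original source):
 *
 *   sender                      sequencer                    all members
 *   ------                      ---------                    -----------
 *   _downMsg(): dest == null
 *     -> _sendBcastRequest()
 *     -> REQ[localSeqID] ---->  _recvBcastRequest():
 *                                 assigns ++sequencerSeqID
 *                               <---- REP[localSeqID, seqID]
 *   _recvBcastReply():
 *     re-sends the original msg as
 *     BCAST[seqID] ----------------------------------------> _recvBcast():
 *                                                               queue in upTbl,
 *                                                               _deliverBcast()
 *                                                               delivers in
 *                                                               seqID order
 */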
/**
* Resend the bcast request with the given localSeqID
*
* @param seqID the local sequence ID of the broadcast request to retransmit
*/
private void _retransmitBcastRequest(long seqID) {
try { stateLock.readLock(); try {
Trace.info("TOTAL", "Retransmit BCAST_REQ[" + seqID + "]");
_transmitBcastRequest(seqID);
// ** Revoke the shared lock
} finally { stateLock.readUnlock(); }
} catch(RWLock.IntException ex) {
Trace.error("TOTAL", ex.getMessage());
}
}
/* Up event handlers
* If the return value is true the event travels further up the stack
* else it won't be forwarded
*/
/**
* Prepare for a VIEW_CHANGE: switch to flushing state
*
* @param event the BLOCK event
* @return true if the event is to be forwarded further up
*/
private boolean _upBlock(Event event) {
try { stateLock.writeLock(); try {
state = FLUSH;
} finally { stateLock.writeUnlock(); }
} catch(RWLock.IntException ex) {
Trace.error("TOTAL", ex.getMessage());
}
return(true);
}
/**
* Handle an up MSG event
*
* @param event the MSG event
* @return true if the event is to be forwarded further up
*/
private boolean _upMsg(Event event) {
Message msg;
Object obj;
Header header;
try { stateLock.readLock(); try {
// If NULL_STATE, shouldn't receive any msg on the up queue!
if (state == NULL_STATE) {
Trace.error("TOTAL", "Up msg in NULL_STATE");
return(false);
}
// Peek the header:
// (UCAST) A unicast message - Send up the stack
// (BCAST) A broadcast message - Handle specially
// (REQ) A broadcast request - Handle specially
// (REP) A broadcast reply from the sequencer - Handle specially
msg = (Message)event.getArg();
if (!((obj = msg.getHeader(getName())) instanceof TOTAL.Header)) {
Trace.error("TOTAL", "No TOTAL.Header found");
return(false);
}
header = (Header)obj;
switch(header.type) {
case Header.UCAST:
_recvUcast(msg);
return(true);
case Header.BCAST:
_recvBcast(msg);
return(false);
case Header.REQ:
_recvBcastRequest(msg);
return(false);
case Header.REP:
_recvBcastReply(header);
return(false);
default:
Trace.error("TOTAL", "Unknown header type");
return(false);
}
// ** Revoke the shared lock
} finally { stateLock.readUnlock(); }
} catch(RWLock.IntException ex) {
Trace.error("TOTAL", ex.getMessage());
}
return(true);
}
/**
* Set the address of this group member
*
* @param event the SET_LOCAL_ADDRESS event
* @return true if event should be forwarded further up
*/
private boolean _upSetLocalAddress(Event event) {
try { stateLock.writeLock(); try {
addr = (Address)event.getArg();
} finally { stateLock.writeUnlock(); }
} catch(RWLock.IntException ex) {
Trace.error("TOTAL", ex.getMessage());
}
return(true);
}
/**
* Handle view changes
*
* @param event the VIEW_CHANGE event
* @return true if the event should be forwarded to the layer above
*/
private boolean _upViewChange(Event event) {
Object oldSequencerAddr;
try { stateLock.writeLock(); try {
state = RUN;
// i. See if this member is the sequencer
// ii. If this is the sequencer, reset the sequencer's sequence ID
// iii. Reset the last received sequence ID
// iv. Replay undelivered bcasts: Put all the undelivered bcasts
// sent by us back to the req queue and discard the rest
oldSequencerAddr = sequencerAddr;
sequencerAddr =
(Address)((View)event.getArg()).getMembers().elementAt(0);
if (addr.equals(sequencerAddr)) {
sequencerSeqID = NULL_ID;
if ((oldSequencerAddr == null) ||
(!addr.equals(oldSequencerAddr)))
Trace.info("TOTAL", "I'm the new sequencer");
}
seqID = NULL_ID;
_replayBcast();
} finally { stateLock.writeUnlock(); }
} catch(RWLock.IntException ex) {
Trace.error("TOTAL", ex.getMessage());
}
return(true);
}
/*
* Down event handlers
* If the return value is true the event travels further down the stack
* else it won't be forwarded
*/
/**
* Blocking confirmed - No messages should come from above until a
* VIEW_CHANGE event is received. Switch to blocking state.
*
* @param event the BLOCK_OK event
* @return true if event should travel further down
*/
private boolean _downBlockOk(Event event) {
try { stateLock.writeLock(); try {
state = BLOCK;
} finally { stateLock.writeUnlock(); }
} catch(RWLock.IntException ex) {
Trace.error("TOTAL", ex.getMessage());
}
return(true);
}
/**
* A MSG event travelling down the stack. Forward unicast messages and treat
* broadcast messages specially.<br>
*
* If in <code>BLOCK</code> state, i.e. it has replied with a
* <code>BLOCK_OK</code> and hasn't yet received a
* <code>VIEW_CHANGE</code> event, messages are discarded<br>
*
* If in <code>FLUSH</code> state, forward unicast but queue broadcasts
*
* @param event the MSG event
* @return true if event should travel further down
*/
private boolean _downMsg(Event event) {
Message msg;
try { stateLock.readLock(); try {
// i. Discard all msgs, if in NULL_STATE
// ii. Discard all msgs, if blocked
if (state == NULL_STATE) {
Trace.error("TOTAL", "Discard msg in NULL_STATE");
return(false);
}
if (state == BLOCK) {
Trace.error("TOTAL", "Blocked, discard msg");
return(false);
}
msg = (Message)event.getArg();
if (msg.getDest() == null) {
_sendBcastRequest(msg);
return(false);
} else {
msg = _sendUcast(msg);
event.setArg(msg);
}
// ** Revoke the shared lock
} finally { stateLock.readUnlock(); }
} catch(RWLock.IntException ex) {
Trace.error("TOTAL", ex.getMessage());
}
return(true);
}
/**
* Prepare this layer to receive messages from above
*
* @throws Exception if the protocol stack's timer is not available
*/
public void start() throws Exception {
TimeScheduler timer;
timer = stack != null ? stack.timer : null;
if(timer == null)
throw new Exception("TOTAL.start(): timer is null");
reqTbl = new TreeMap();
upTbl = new TreeMap();
retransmitter = new AckSenderWindow(new Command(), AVG_RETRANSMIT_INTERVAL);
}
/**
* Handle the stop() method travelling down the stack.
* <p>
* The local addr is set to null, since after a Start->Stop->Start
* sequence this member's addr is not guaranteed to be the same
*
*/
public void stop() {
try {
stateLock.writeLock();
try {
state = NULL_STATE;
retransmitter.reset();
reqTbl.clear();
upTbl.clear();
addr = null;
}
finally {
stateLock.writeUnlock();
}
}
catch(RWLock.IntException ex) {
Trace.error("TOTAL.stop()", ex.getMessage());
}
}
/**
* Process an event coming from the layer below
*
* @param event the event to process
*/
private void _up(Event event) {
switch(event.getType()) {
case Event.BLOCK:
if (!_upBlock(event)) return;
break;
case Event.MSG:
if (!_upMsg(event)) return;
break;
case Event.SET_LOCAL_ADDRESS:
if (!_upSetLocalAddress(event)) return;
break;
case Event.VIEW_CHANGE:
if (!_upViewChange(event)) return;
break;
default: break;
}
passUp(event);
}
/**
* Process an event coming from the layer above
*
* @param event the event to process
*/
private void _down(Event event) {
switch(event.getType()) {
case Event.BLOCK_OK:
if (!_downBlockOk(event)) return;
break;
case Event.MSG:
if (!_downMsg(event)) return;
break;
default: break;
}
passDown(event);
}
/**
* Create the TOTAL layer
*/
public TOTAL() {}
// Methods deriving from <code>Protocol</code>
// javadoc inherited from superclass
public String getName() {
return(_getName());
}
// javadoc inherited from superclass
public boolean setProperties(Properties properties) {
return(_setProperties(properties));
}
// javadoc inherited from superclass
public Vector requiredDownServices() {
return(_requiredDownServices());
}
// javadoc inherited from superclass
public Vector requiredUpServices() {
return(_requiredUpServices());
}
// javadoc inherited from superclass
public void up(Event event) {
_up(event);
}
// javadoc inherited from superclass
public void down(Event event) {
_down(event);
}
}
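/*
 * Illustrative sketch (not part of the original source): a protocol layer like
 * this is normally placed near the top of a JGroups stack so that all group
 * messages pass through it. Assuming the classic colon-separated stack
 * configuration, a hypothetical stack string might end with this layer, e.g.
 *
 *   "UDP:PING:FD:STABLE:NAKACK:UNICAST:FRAG:GMS:TOTAL"
 *
 * The exact set and order of the lower layers is an assumption for the example
 * and depends on the application's configuration.
 */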
|
package org.jboss.forge.arquillian;
import java.io.File;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.aether.artifact.Artifact;
import org.eclipse.aether.graph.Dependency;
import org.jboss.arquillian.container.spi.client.deployment.DeploymentDescription;
import org.jboss.arquillian.container.spi.client.deployment.TargetDescription;
import org.jboss.arquillian.container.spi.client.protocol.ProtocolDescription;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.OverProtocol;
import org.jboss.arquillian.container.test.api.ShouldThrowException;
import org.jboss.arquillian.container.test.api.TargetsContainer;
import org.jboss.arquillian.container.test.spi.client.deployment.DeploymentScenarioGenerator;
import org.jboss.arquillian.test.spi.TestClass;
import org.jboss.forge.arquillian.archive.ForgeRemoteAddon;
import org.jboss.forge.arquillian.archive.RepositoryForgeArchive;
import org.jboss.forge.arquillian.maven.ProjectHelper;
import org.jboss.forge.furnace.addons.AddonId;
import org.jboss.forge.furnace.util.Annotations;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.descriptor.api.Descriptor;
public class ForgeDeploymentScenarioGenerator implements DeploymentScenarioGenerator
{
Map<String, String> dependencyMap;
@Override
public List<DeploymentDescription> generate(TestClass testClass)
{
List<DeploymentDescription> deployments = new ArrayList<DeploymentDescription>();
Method[] deploymentMethods = testClass.getMethods(Deployment.class);
for (Method deploymentMethod : deploymentMethods)
{
validate(deploymentMethod);
if (deploymentMethod.isAnnotationPresent(Dependencies.class))
deployments.addAll(generateDependencyDeployments(testClass.getJavaClass(), deploymentMethod));
deployments.add(generateDeployment(deploymentMethod));
}
return deployments;
}
private Collection<DeploymentDescription> generateDependencyDeployments(Class<?> classUnderTest,
Method deploymentMethod)
{
Dependencies dependency = deploymentMethod.getAnnotation(Dependencies.class);
Collection<DeploymentDescription> deployments = new ArrayList<DeploymentDescription>();
if (dependency.value() != null)
for (AddonDependency addon : dependency.value())
{
String version;
if (addon.version().isEmpty())
{
version = resolveVersionFromPOM(classUnderTest, addon.name());
if (version == null)
{
throw new IllegalStateException("Could not resolve the version for [" + addon.name()
+ "]. Either specify the version for this @" + AddonDependency.class.getSimpleName()
+ " in [" + classUnderTest.getName() + "] or add it to pom.xml located at ["
+ getPomFileFor(classUnderTest) + "]");
}
}
else
{
version = addon.version();
}
AddonId id = AddonId.from(addon.name(), version);
ForgeRemoteAddon remoteAddon = ShrinkWrap.create(ForgeRemoteAddon.class).setAddonId(id);
if (Annotations.isAnnotationPresent(deploymentMethod, DeployToRepository.class))
remoteAddon.setAddonRepository(Annotations.getAnnotation(deploymentMethod, DeployToRepository.class)
.value());
DeploymentDescription deploymentDescription = new DeploymentDescription(id.toCoordinates(), remoteAddon);
deploymentDescription.shouldBeTestable(false);
deployments.add(deploymentDescription);
}
return deployments;
}
/**
 * Read the pom.xml of the project containing the class under test and resolve
 * the version of the given addon from its declared dependencies
 *
 * @param classUnderTest the test class whose project pom.xml is consulted
 * @param name the addon name in groupId:artifactId form
 * @return the resolved version, or null if the addon is not declared in the pom.xml
 */
private String resolveVersionFromPOM(Class<?> classUnderTest, String name)
{
if (dependencyMap == null)
{
dependencyMap = new HashMap<String, String>();
File pomFile = getPomFileFor(classUnderTest);
try
{
List<Dependency> dependencies = ProjectHelper.INSTANCE.resolveDependenciesFromPOM(pomFile);
for (Dependency dependency : dependencies)
{
Artifact artifact = dependency.getArtifact();
String addonName = artifact.getGroupId() + ":" + artifact.getArtifactId();
String version = artifact.getBaseVersion();
dependencyMap.put(addonName, version);
}
}
catch (Exception e)
{
// TODO log this instead?
e.printStackTrace();
}
}
return dependencyMap.get(name);
}
private File getPomFileFor(Class<?> classUnderTest)
{
URL resource = classUnderTest.getClassLoader().getResource("");
if (resource == null)
{
throw new IllegalStateException("Could not find the pom.xml for class " + classUnderTest.getName());
}
String directory = resource.getFile();
File pomFile = findBuildDescriptor(directory);
return pomFile;
}
private File findBuildDescriptor(String classLocation)
{
File pom = null;
File dir = new File(classLocation);
while (dir != null)
{
File testPom = new File(dir, "pom.xml");
if (testPom.isFile())
{
pom = testPom;
break;
}
dir = dir.getParentFile();
}
return pom;
}
private void validate(Method deploymentMethod)
{
if (!Modifier.isStatic(deploymentMethod.getModifiers()))
{
throw new IllegalArgumentException("Method annotated with " + Deployment.class.getName() + " is not static. "
+ deploymentMethod);
}
if (!Archive.class.isAssignableFrom(deploymentMethod.getReturnType())
&& !Descriptor.class.isAssignableFrom(deploymentMethod.getReturnType()))
{
throw new IllegalArgumentException(
"Method annotated with " + Deployment.class.getName() +
" must have return type " + Archive.class.getName() + " or " + Descriptor.class.getName()
+ ". " + deploymentMethod);
}
if (deploymentMethod.getParameterTypes().length != 0)
{
throw new IllegalArgumentException("Method annotated with " + Deployment.class.getName()
+ " can not accept parameters. " + deploymentMethod);
}
}
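/*
 * Illustrative sketch (not part of the original source): a test-class method
 * that satisfies the rules enforced by validate() above and exercises the
 * dependency handling in generate(). The addon coordinates are hypothetical.
 *
 *   @Deployment
 *   @Dependencies({ @AddonDependency(name = "com.example:my-addon") })
 *   public static Archive<?> getDeployment()
 *   {
 *      // Must be static, take no parameters, and return an Archive or a
 *      // Descriptor. Because no version attribute is given, the version of
 *      // "com.example:my-addon" is resolved from the project's pom.xml.
 *      return ShrinkWrap.create(RepositoryForgeArchive.class);
 *   }
 */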
/**
 * Build a {@link DeploymentDescription} from a method annotated with {@link Deployment}
 *
 * @param deploymentMethod the annotated deployment method
 * @return the deployment description derived from the method's annotations and return value
 */
private DeploymentDescription generateDeployment(Method deploymentMethod)
{
TargetDescription target = generateTarget(deploymentMethod);
ProtocolDescription protocol = generateProtocol(deploymentMethod);
Deployment deploymentAnnotation = deploymentMethod.getAnnotation(Deployment.class);
DeploymentDescription description = null;
if (Archive.class.isAssignableFrom(deploymentMethod.getReturnType()))
{
Archive<?> archive = invoke(Archive.class, deploymentMethod);
if (archive instanceof RepositoryForgeArchive)
{
if (Annotations.isAnnotationPresent(deploymentMethod, DeployToRepository.class))
((RepositoryForgeArchive) archive).setAddonRepository(Annotations.getAnnotation(deploymentMethod,
DeployToRepository.class).value());
}
description = new DeploymentDescription(deploymentAnnotation.name(), archive);
description.shouldBeTestable(deploymentAnnotation.testable());
}
else if (Descriptor.class.isAssignableFrom(deploymentMethod.getReturnType()))
{
description = new DeploymentDescription(deploymentAnnotation.name(),
invoke(Descriptor.class, deploymentMethod));
}
description.shouldBeManaged(deploymentAnnotation.managed());
description.setOrder(deploymentAnnotation.order());
if (target != null)
{
description.setTarget(target);
}
if (protocol != null)
{
description.setProtocol(protocol);
}
if (deploymentMethod.isAnnotationPresent(ShouldThrowException.class))
{
description.setExpectedException(deploymentMethod.getAnnotation(ShouldThrowException.class).value());
}
return description;
}
/**
 * @param deploymentMethod the deployment method
 * @return the target from {@link TargetsContainer}, or the default target
 */
private TargetDescription generateTarget(Method deploymentMethod)
{
if (deploymentMethod.isAnnotationPresent(TargetsContainer.class))
{
return new TargetDescription(deploymentMethod.getAnnotation(TargetsContainer.class).value());
}
return TargetDescription.DEFAULT;
}
/**
 * @param deploymentMethod the deployment method
 * @return the protocol from {@link OverProtocol}, or the default protocol
 */
private ProtocolDescription generateProtocol(Method deploymentMethod)
{
if (deploymentMethod.isAnnotationPresent(OverProtocol.class))
{
return new ProtocolDescription(deploymentMethod.getAnnotation(OverProtocol.class).value());
}
return ProtocolDescription.DEFAULT;
}
/**
 * Invoke the static deployment method and cast its result
 *
 * @param type the expected return type
 * @param deploymentMethod the deployment method to invoke
 * @return the result of the invocation, cast to the given type
 */
private <T> T invoke(Class<T> type, Method deploymentMethod)
{
try
{
return type.cast(deploymentMethod.invoke(null));
}
catch (Exception e)
{
throw new RuntimeException("Could not invoke deployment method: " + deploymentMethod, e);
}
}
}
|
package org.nutz.mvc.upload;
import java.util.Map;
import java.util.Map.Entry;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.nutz.lang.util.NutMap;
/**
 * Helper functions for tracking file-upload progress through the HTTP session
 * and for collecting the plain query-string parameters of an upload request.
 *
 * @author zozoh(zozohtnt@gmail.com)
 */
public abstract class Uploads {
/**
 * Retrieve the UploadInfo stored in this request's session.
 *
 * @param req the current HTTP request
 * @return the UploadInfo for this session, or null if none has been stored
 */
public static UploadInfo getInfo(HttpServletRequest req) {
return (UploadInfo) req.getSession().getAttribute(UploadInfo.class.getName());
}
/**
 * Create a new UploadInfo, store it in the session (if one exists) and
 * initialize its total size from the request's content length.
 *
 * @param req the current HTTP request
 * @return the newly created UploadInfo
 */
public static UploadInfo createInfo(HttpServletRequest req) {
UploadInfo info = new UploadInfo();
HttpSession sess = req.getSession();
if (null != sess) {
sess.setAttribute(UploadInfo.SESSION_NAME, info);
}
info.sum = req.getContentLength();
return info;
}
/**
 * Build a parameter map from the request's query-string parameters. For a
 * multi-valued parameter only the first value is kept.
 *
 * @param req the current HTTP request
 * @return a NutMap of parameter names to their (first) values
 */
@SuppressWarnings("unchecked")
public static NutMap createParamsMap(HttpServletRequest req) {
NutMap params = new NutMap();
// parse query strings
Map<String, String []> paramsZ = req.getParameterMap();
if (null != paramsZ && paramsZ.size() >0 ){
for (Entry<String, String []> ppp : paramsZ.entrySet()) {
if (ppp.getValue() != null ) {
if (ppp.getValue().length > 0)
params.put(ppp.getKey(), ppp.getValue()[0]);
else
params.put(ppp.getKey(), "");
} else
params.put(ppp.getKey(), null);
}
}
return params;
}
}
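/*
 * Illustrative sketch (not part of the original source): typical use of these
 * helpers in an upload-handling component. The surrounding controller and the
 * call sites are assumptions for the example.
 *
 *   // Before parsing a multipart request, record its total size so that a
 *   // separate progress request can poll it later from the same session:
 *   UploadInfo info = Uploads.createInfo(req);
 *
 *   // In the progress endpoint, read the info back (may be null if no
 *   // upload has started for this session):
 *   UploadInfo current = Uploads.getInfo(req);
 *
 *   // Plain query-string parameters of the upload request can be collected
 *   // into a NutMap up front:
 *   NutMap params = Uploads.createParamsMap(req);
 */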
|
package verification.timed_state_exploration.zoneProject;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import lpn.parser.ExprTree;
import lpn.parser.LhpnFile;
import lpn.parser.Transition;
import lpn.parser.Variable;
import verification.platu.lpn.DualHashMap;
import verification.platu.lpn.LpnTranList;
import verification.platu.main.Options;
import verification.platu.stategraph.State;
/**
* This class is for storing and manipulating the delay information for transitions as well as
* the values of continuous variables including their rates.
* A Zone represents the non-zero continuous variables and the timers for the delay in a
* Difference Bound Matrix (DBM)
*
* t0 c0 c1 t0 t1
* t0 m00 m01 m02 m03 m04
* c0 m10 m11 m12 m13 m14
* c1 m20 m21 m22 m23 m24
* t0 m30 m31 m32 m33 m34
* t1 m40 m41 m42 m43 m44
*
* For the timers, tj - ti <= m(ti,tj) where m(ti,tj) represents the element of the matrix with
* row ti and column tj. In particular, m(t0,ti) is an upper bound for ti and -m(ti,0) is a
* lower bound for ti. The continuous variables are normalized to have rate one before being
* placed in the matrix. Thus for a continuous variable ci with rate ri, the entry m(t0, ci) is
* (upper bound)/ri where 'upper bound' is the upper bound for the continuous variable and
* m(ci,t0) is -1*(lower bound)/ri where 'lower bound' is the lower bound of the continuous variable.
* When the rate is negative, dividing out by the rate switches the inequalities. For example,
* if ci is a continuous variable such that l < ci < u for some values l and u, then the normalized
* ci (that is ci/ri) satisfies u/ri < ci/ri < l/ri. Hence m(t0,ci) is (lower bound)/ri and
* m(ci,t0) is -1*(upper bound)/ri. The m(ci,cj) as well as m(ti,ci) and m(ci,ti) entries give the same kind
* of relational information as with just timers, but with the normalized values of the continuous
* variables. Note that a Zone with normalized continuous variables is referred to as being a 'warped'
* zone.
*
* The rate zero continuous variables are also stored in a Zone object, but they are not present in
* the DBM. The Zone merely records the range of each such continuous variable.
*
* The timers are referenced by the name of the transition that they are associated with and continuous
* variables are referenced by their name as well. In addition for non-zero rate continuous variables and
* the timers, several methods allow them to be referred to by their index as part of the DBM. For
* example, c0 in the above DBM example, has index 1.
*
* A third way that the timers and variables (both rate zero and rate non-zero) can be referred to is by
* an LPNTransitionPair object. These objects refer to a timer or continuous variable by providing the index
* of the corresponding transition or the index of the continuous variable as a member of an LPN and the
* index of that LPN. The LPNTransitionPair can be made more specific in the case of the continuous variables
* by using an LPNContinuousPair. These objects also provide the rate of the variable. LPNTransitionPairs
* can be used with continuous variables when they are only being used as an index. If the rate is needed
* for the method, then LPNContinuousPairs must be used.
*
* @author Andrew N. Fisher
*
*/
public class Zone{
/*Abstraction Function :
* The difference bound matrix is stored in the _matrix member field, along with
* the upper and lower bounds of the timers and rates for continuous variables.
* The upper and lower bounds are stored when the timer becomes enabled. The upper and
* lower bounds of the rates are stored when the rate is assigned. The upper and lower
* bounds are stored in the first row and first column of the _matrix member field.
* The DBM occupies the rest of the array, that is, the sub-array formed by removing
* the first column and first row.
* For example, let t1 be a timer for a transition whose delay is between 2 and 3. Further
* let c1 be a continuous variable with rate between 4 and 5. Then _matrix would look like
* lb t0 c1 t1
* ub x 0 5 3
* t0 0 m m m
* c1 -4 m m m
* t1 -2 m m m
*
* without the lb, ub, t0, t1, and c1 labels.
*
* The x does not represent anything and will most likely be zero. The upper and lower
* bounds for the zero timer (which is always present) are always 0. The m's comprise
* the DBM.
*
* For the most part, a timer or a continuous variable is referred to internally by an
* LPNTransitionPair. An LPNTransitionPair combines the index of the transition (or
* continuous variable) with the index of the LPN that the transition (or continuous
* variables) is a member of. Both continuous variables and transitions can be referred
* to by an LPNTransitionPair; however, it is better to use an LPNContinuousPair (which
* is inherited from the LPNTransitionPair) to refer to continuous variables.
* LPNContinuousPairs are used to distinguish continuous variables from transitions. They
* also store the current rate of the continuous variable.
*
* The LPNTransitionPairs are stored in the _indexToTimerPair member field. The row/column
* in the DBM for a transition is determined by its position in this array. For example,
* suppose a transition t has an LPNTransitionPair ltTranPair that is the third element of
* the _indexToTimerPair. Then t will be the third column/row of the DBM.
*
* Continuous variables are handled slightly differently. The LPNContinuousPair for the
* continuous variables with a non-zero rate are stored in the _indexToTimerPair just as
* with the transitions. And just like with the transitions, the index in the DBM is
* determined by the index of the LPNContinuousPair in the _indexToTimerPair. However,
* rate zero continuous variables are stored in the _rateZeroContinuous member variable.
* The _rateZeroContinuous pairs an LPNContinuousPair with a VariableRangePair. The
* VariableRangePair combines the variable with its upper and lower bound.
*
*/
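/* Worked example of the warping described above (added for clarity, not part
 * of the original source). Suppose a continuous variable c has range [2, 10].
 *
 *   rate ri = 2  (positive):  m(t0,c) = 10/2 = 5,    m(c,t0) = -1*2/2     = -1
 *   rate ri = -2 (negative):  m(t0,c) = 2/(-2) = -1, m(c,t0) = -1*10/(-2) = 5
 *
 * For a negative rate the roles of the upper and lower bound swap, which is
 * exactly the inequality flip described in the class comment.
 */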
/* Representation invariant :
* Zones are immutable.
* Integer.MAX_VALUE is used to logically represent infinity.
* The lb and ub values for a timer should be set when the timer is enabled.
* A negative hash code indicates that the hash code has not been set.
* The index of the timer in _indexToTimer is the index in the DBM and should contain
* the zeroth timer.
* The array _indexToTimerPair should always be sorted.
* The index of the LPN should match where it is in the _lpnList, that is, if lpn is
* an LhpnFile object in _lpnList, then _lpnList[lpn.getLpnIndex()] == lpn.
* The LPNTransitionPair in the _indexToTimer array should be an LPNContinuousPair
* when it stores the index to a continuous variable. Testing that the index
* is an LPNContinuousPair is used to determine if the indexing object points
* to a continuous variable. Also the LPNContinuousPair keeps track of the current rate.
*
*/
/*
* Resource List :
* TODO : Create a list reference where the algorithms can be found that this class
* depends on.
*/
public static final int INFINITY = Integer.MAX_VALUE;
/* The lower and upper bounds of the times as well as the dbm. */
private int[][] _matrix;
/* Maps the index to the timer. The index is row/column of the DBM sub-matrix.
* Logically the zero timer is given index -1.
* */
//private int[] _indexToTimer;
private LPNTransitionPair[] _indexToTimerPair;
/* The hash code. */
private int _hashCode;
/* A lexicon between a transitions index and its name. */
//private static HashMap<Integer, Transition> _indexToTransition;
/* Set if a failure in the testSplit method has fired already. */
//private static boolean _FAILURE = false;
/* Hack to pass a parameter to the equals method though a variable */
//private boolean subsetting = false;
/* Stores the continuous variables that have rate zero */
// HashMap<LPNTransitionPair, Variable> _rateZeroContinuous;
//DualHashMap<RangeAndPairing, Variable> _rateZeroContinuous;
DualHashMap<LPNContAndRate, VariableRangePair> _rateZeroContinuous;
/* Records the largest zone that occurs. */
public static int ZoneSize = 0;
/* Write a log file */
private static BufferedWriter _writeLogFile = null;
/**
* Returns the write log.
* @return
* The _writeLogFile.
*/
public static BufferedWriter get_writeLogFile(){
return _writeLogFile;
}
/**
* Sets the BufferedWriter.
* @param writeLogFile
*/
public static void set_writeLogFile(BufferedWriter writeLogFile){
_writeLogFile = writeLogFile;
}
/**
* Sets the writeLogFile to null.
*/
public static void reset_writeLogFile(){
_writeLogFile = null;
}
private void checkZoneMaxSize(){
if(dbmSize() > ZoneSize){
ZoneSize = dbmSize();
}
}
private LhpnFile[] _lpnList;
/*
* Turns on and off subsets for the zones.
* True means subset will be considered.
* False means subsets will not be considered.
*/
private static boolean _subsetFlag = true;
/*
* Turns on and off supersets for zones.
* True means that supersets will be considered.
* False means that supersets will not be considered.
*/
private static boolean _supersetFlag = true;
/**
* Gets the value of the subset flag.
* @return
* True if subsets are requested, false otherwise.
*/
public static boolean getSubsetFlag(){
return _subsetFlag;
}
/**
* Sets the value of the subset flag.
* @param useSubsets
* The value for the subset flag. Set to true if
* supersets are to be considered, false otherwise.
*/
public static void setSubsetFlag(boolean useSubsets){
_subsetFlag = useSubsets;
}
/**
* Gets the value of the superset flag.
* @return
* True if supersets are to be considered, false otherwise.
*/
public static boolean getSupersetFlag(){
return _supersetFlag;
}
/**
* Sets the superset flag.
* @param useSupersets
* The value of the superset flag. Set to true if
* supersets are to be considered, false otherwise.
*/
public static void setSupersetFlag(boolean useSupersets){
_supersetFlag = useSupersets;
}
/**
* Construct a zone that has the given timers.
* @param timers
* The ith index of the array is the index of the timer. For example,
* if timers = [1, 3, 5], then the zeroth row/column of the DBM is the
* timer of the transition with index 1, the first row/column of the
* DBM is the timer of the transition with index 3, and the 2nd
* row/column is the timer of the transition with index 5. Do not
* include the zero timer.
* @param matrix
* The DBM augmented with the lower and upper bounds of the delays for the
* transitions. For example, suppose a zone has timers [1, 3, 5] (as
* described in the timers parameters). The delay for transition 1 is
* [1, 3], the delay for transition 3 is [2,5], and the delay for
* transition 5 is [4,6]. Also suppose the DBM is
* t0 t1 t3 t5
* t0 | 0, 3, 3, 3 |
* t1 | 0, 0, 0, 0 |
* t3 | 0, 0, 0, 0 |
* t5 | 0, 0, 0, 0 |
* Then the matrix that should be passed is
* lb t0 t1 t3 t5
* ub| 0, 0, 3, 5, 6|
* t0| 0, 0, 3, 3, 3|
* t1|-1, 0, 0, 0, 0|
* t3|-2, 0, 0, 0, 0|
* t5|-4, 0, 0, 0, 0|
* The matrix should be non-null and the zero timer should always be the
* first timer, even when there are no other timers.
*/
public Zone(int[] timers, int[][] matrix)
{
// A negative number indicates that the hash code has not been set.
_hashCode = -1;
// Make a copy to reorder the timers.
// _indexToTimer = Arrays.copyOf(timers, timers.length);
// Make a copy to reorder the timers.
_indexToTimerPair = new LPNTransitionPair[timers.length];
for(int i=0; i<timers.length; i++){
// _indexToTimerPair[i] = new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN,
// timers[i], true);
_indexToTimerPair[i] = new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN,
timers[i]);
}
// Sorting the array.
// Arrays.sort(_indexToTimer);
// Sorting the array.
Arrays.sort(_indexToTimerPair);
//if(_indexToTimer[0] != 0)
// if(_indexToTimer[0] != -1)
// // Add the zeroth timer.
// int[] newIndexToTimer = new int[_indexToTimer.length+1];
// for(int i=0; i<_indexToTimer.length; i++)
// newIndexToTimer[i+1] = _indexToTimer[i];
// _indexToTimer = newIndexToTimer;
// _indexToTimer[0] = -1;
if(_indexToTimerPair[0].get_transitionIndex() != -1){
// Add the zeroth timer. The new array needs one extra slot so the
// existing pairs can be shifted up by one to make room for it.
LPNTransitionPair[] newIndexToTimerPair =
new LPNTransitionPair[_indexToTimerPair.length+1];
for(int i=0; i<_indexToTimerPair.length; i++){
newIndexToTimerPair[i+1] = _indexToTimerPair[i];
}
_indexToTimerPair = newIndexToTimerPair;
// _indexToTimerPair[0] = new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN, -1, true);
_indexToTimerPair[0] = new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN, -1);
}
// if(_indexToTimer[0] < 0)
// // Add a zero timer.
// else if(_indexToTimer[0] > 0)
// int[] newTimerIndex = new int[_indexToTimer.length+1];
// for(int i=0; i<_indexToTimer.length; i++)
// newTimerIndex[i+1] = _indexToTimer[i];
// Map the old index of the timer to the new index of the timer.
HashMap<Integer, Integer> newIndex = new HashMap<Integer, Integer>();
// For the old index, find the new index.
for(int i=0; i<timers.length; i++)
{
// Since the zeroth timer is not included in the timers passed in,
// the index in the DBM is 1 more than the index of the timer
// in the timers array.
//newIndex.put(i+1, Arrays.binarySearch(_indexToTimer, timers[i]));
// LPNTransitionPair searchValue =
// new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN, timers[i], true);
LPNTransitionPair searchValue =
new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN, timers[i]);
newIndex.put(i+1, Arrays.binarySearch(_indexToTimerPair, searchValue));
}
// Add the zero timer index.
newIndex.put(0, 0);
// Initialize the matrix.
_matrix = new int[matrixSize()][matrixSize()];
// Copy the DBM
for(int i=0; i<dbmSize(); i++)
{
for(int j=0; j<dbmSize(); j++)
{
// Copy the passed in matrix to _matrix.
setDbmEntry(newIndex.get(i), newIndex.get(j),
matrix[dbmIndexToMatrixIndex(i)][dbmIndexToMatrixIndex(j)]);
// In the above, changed setDBMIndex to setdbm
}
}
// Copy in the upper and lower bounds. The zero timer does not have an upper or lower bound
// so the index starts at i=1, the first non-zero timer.
for(int i=1; i< dbmSize(); i++)
{
setUpperBoundbydbmIndex(newIndex.get(i), matrix[0][dbmIndexToMatrixIndex(i)]);
// Note : The method setLowerBoundbydbmIndex, takes the value of the lower bound
// and the matrix stores the negative of the lower bound. So the matrix value
// must be multiplied by -1.
setLowerBoundbydbmIndex(newIndex.get(i), -1*matrix[dbmIndexToMatrixIndex(i)][0]);
}
recononicalize();
}
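/*
 * Illustrative sketch (not part of the original source): constructing a zone
 * from the example data given in the constructor's Javadoc above.
 *
 *   int[] timers = {1, 3, 5};
 *   int[][] matrix = {
 *      // lb  t0  t1  t3  t5
 *      {  0,  0,  3,  5,  6 },   // ub
 *      {  0,  0,  3,  3,  3 },   // t0
 *      { -1,  0,  0,  0,  0 },   // t1
 *      { -2,  0,  0,  0,  0 },   // t3
 *      { -4,  0,  0,  0,  0 },   // t5
 *   };
 *   Zone z = new Zone(timers, matrix);
 */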
/**
* Initializes a zone according to the markings of the given state.
* @param initialState
* The initial state. The zone is initialized as if all enabled timers
* have just been enabled.
*/
public Zone(State initialState)
{
// Extract the associated LPN.
LhpnFile lpn = initialState.getLpn();
int LPNIndex = lpn.getLpnIndex();
if(_lpnList == null){
// If no LPN exists yet, create it and put lpn in it.
_lpnList = new LhpnFile[LPNIndex+1];
_lpnList[LPNIndex] = lpn;
}
else if(_lpnList.length <= LPNIndex){
// The list does not contain the lpn.
LhpnFile[] tmpList = _lpnList;
_lpnList = new LhpnFile[LPNIndex+1];
_lpnList[LPNIndex] = lpn;
// Copy any that exist already.
for(int i=0; i<tmpList.length; i++){
_lpnList[i] = tmpList[i];
}
}
else if(_lpnList[LPNIndex] != lpn){
// This checks that the appropriate lpn is in the right spot.
// If not (which gets you in this block), then this fixes it.
_lpnList[LPNIndex] = lpn;
}
// Default value for the hash code indicating that the hash code has not
// been set yet.
_hashCode = -1;
// Get the list of currently enabled Transitions by their index.
boolean[] enabledTran = initialState.getTranVector();
ArrayList<LPNTransitionPair> enabledTransitionsArrayList =
new ArrayList<LPNTransitionPair>();
// LPNTransitionPair zeroPair = new LPNTransitionPair(LPNTransitionPair.ZERO_TIMER, -1);
LPNTransitionPair zeroPair = new LPNTransitionPair(LPNTransitionPair.ZERO_TIMER,
LPNTransitionPair.ZERO_TIMER);
// Add the zero timer first.
enabledTransitionsArrayList.add(zeroPair);
// The index of the boolean value corresponds to the index of the Transition.
for(int i=0; i<enabledTran.length; i++){
if(enabledTran[i]){
enabledTransitionsArrayList.add(new LPNTransitionPair(LPNIndex, i));
}
}
_indexToTimerPair = enabledTransitionsArrayList.toArray(new LPNTransitionPair[0]);
_matrix = new int[matrixSize()][matrixSize()];
for(int i=1; i<dbmSize(); i++)
{
// Get the name for the timer in the i-th column/row of DBM
String tranName =
lpn.getTransition(_indexToTimerPair[i].get_transitionIndex()).getLabel();
ExprTree delay = lpn.getDelayTree(tranName);
// Get the values of the variables for evaluating the ExprTree.
HashMap<String, String> varValues =
lpn.getAllVarsWithValuesAsString(initialState.getVariableVector());
// Set the upper and lower bound.
int upper, lower;
if(delay.getOp().equals("uniform"))
{
ExprTree lowerDelay = delay.getLeftChild();
ExprTree upperDelay = delay.getRightChild();
lower = (int) lowerDelay.evaluateExpr(varValues);
upper = (int) upperDelay.evaluateExpr(varValues);
}
else
{
lower = (int) delay.evaluateExpr(varValues);
upper = lower;
}
setLowerBoundbydbmIndex(i, lower);
setUpperBoundbydbmIndex(i, upper);
}
// Advance the time and tighten the bounds.
advance();
recononicalize();
checkZoneMaxSize();
}
/**
* Creates a Zone based on the local states.
* @param localStates
* The current state (or initial) of the LPNs.
*/
public Zone(State[] localStates){
// Open the log file.
if(_writeLogFile == null && Options.get_TimingLogfile() != null){
try{
_writeLogFile =
new BufferedWriter(
new FileWriter(Options.get_TimingLogfile()));
} catch (IOException e) {
e.printStackTrace();
}finally{
}
}
// Initialize hash code to -1 (indicating nothing cached).
_hashCode = -1;
// Initialize the LPN list.
initialize_lpnList(localStates);
// Get the enabled transitions. This initializes the _indexToTimerPair
// which stores the relevant information.
// This method will also initialize the _rateZeroContinuous
initialize_indexToTimerPair(localStates);
// Initialize the matrix.
_matrix = new int[matrixSize()][matrixSize()];
// Set the lower bound/ upper bounds of the timers and the rates.
initializeLowerUpperBounds(getAllNames(), localStates);
// Initialize the row and column entries for the continuous variables.
initializeRowColumnContVar();
// Create a previous zone to the initial zone for the sake of warping.
Zone tmpZone = beforeInitialZone();
dbmWarp(tmpZone);
recononicalize();
// Advance Time
//advance();
advance(localStates);
// Re-canonicalize
recononicalize();
// Check the size of the DBM.
checkZoneMaxSize();
}
/**
* Creates a Zone based on the local states.
* @param localStates
* The current state (or initial) of the LPNs.
*/
public Zone(State[] localStates, boolean init){
// Extract the local states.
//State[] localStates = tps.toStateArray();
// Initialize hash code to -1 (indicating nothing cached).
_hashCode = -1;
// Initialize the LPN list.
initialize_lpnList(localStates);
// Get the enabled transitions. This initializes the _indexToTimerPair
// which stores the relevant information.
// This method will also initialize the _rateZeroContinuous
initialize_indexToTimerPair(localStates);
// Initialize the matrix.
_matrix = new int[matrixSize()][matrixSize()];
// Set the lower bound/ upper bounds of the timers and the rates.
initializeLowerUpperBounds(getAllNames(), localStates);
// Initialize the row and column entries for the continuous variables.
initializeRowColumnContVar();
if(init){
return;
}
// Advance Time
//advance();
advance(localStates);
// Re-canonicalize
recononicalize();
// Check the size of the DBM.
checkZoneMaxSize();
}
// /**
// * Sets ups a zone containing continuous variables only.
// * @param continuousValues
// * The values to populate the zone with.
// */
// public Zone(HashMap<LPNContinuousPair, IntervalPair> continuousValues){
// Set<LPNContinuousPair> pairSet = continuousValues.keySet();
// // Copy the LPNContinuousPairs over
// _indexToTimerPair = new LPNTransitionPair[pairSet.size()+1];
// int count = 0; // The position in the _indexToTimerPair for the next value.
// _indexToTimerPair[count++] = LPNTransitionPair.ZERO_TIMER_PAIR;
// for(LPNContinuousPair lcPair: pairSet){
// _indexToTimerPair[count++] = lcPair;
// Arrays.sort(_indexToTimerPair);
// _matrix = new int[matrixSize()][matrixSize()];
// for(int i=1; i<dbmSize(); i++){
// setDbmEntry(i, j, value)
/**
* Gives the names of all the transitions and continuous variables that
* are represented by the zone.
* @return
* The names of the transitions and continuous variables that are
* represented by the zone.
*/
public String[] getAllNames(){
// String[] transitionNames = new String[_indexToTimerPair.length];
// transitionNames[0] = "The zero timer.";
// for(int i=1; i<transitionNames.length; i++){
// LPNTransitionPair ltPair = _indexToTimerPair[i];
// transitionNames[i] = _lpnList[ltPair.get_lpnIndex()]
// .getTransition(ltPair.get_transitionIndex()).getName();
// return transitionNames;
// Get the continuous variable names.
String[] contVar = getContVarNames();
// Get the transition names.
String[] trans = getTranNames();
// Create an array large enough for all the names.
String[] names = new String[contVar.length + trans.length + 1];
// Add the zero timer.
names[0] = "The zero timer.";
// Add the continuous variables.
for(int i=0; i<contVar.length; i++){
names[i+1] = contVar[i];
}
// Add the timers.
for(int i=0; i<trans.length; i++){
// Already the zero timer has been added and the elements of contVar.
// That's a total of 'contVar.length + 1' elements. The last index was
// thus 'contVar.length' So the first index to add to is
// 'contVar.length +1'.
names[1+contVar.length + i] = trans[i];
}
return names;
}
/**
* Get the names of the continuous variables that this zone uses.
* @return
* The names of the continuous variables that are part of this zone.
*/
public String[] getContVarNames(){
// List for accumulating the names.
ArrayList<String> contNames = new ArrayList<String>();
// Find the pairs that represent the continuous variables. Loop starts at
// i=1 since the i=0 is the zero timer.
for(int i=1; i<_indexToTimerPair.length; i++){
LPNTransitionPair ltPair = _indexToTimerPair[i];
// If the isTimer value is false, then this pair represents a continuous
// variable.
//if(!ltPair.get_isTimer()){
// If pair is LPNContinuousPair.
if(ltPair instanceof LPNContinuousPair){
// Get the LPN that this pairing references and find the name of
// the continuous variable whose index is given by this pairing.
contNames.add(_lpnList[ltPair.get_lpnIndex()]
.getContVarName(ltPair.get_transitionIndex()));
}
}
return contNames.toArray(new String[0]);
}
/**
* Gets the names of the transitions that are associated with the timers in the
* zone. Does not return the zero timer.
* @return
* The names of the transitions whose timers are in the zone except the zero
* timer.
*/
public String[] getTranNames(){
// List for accumulating the names.
ArrayList<String> transitionNames = new ArrayList<String>();
// Find the pairs that represent the transition timers.
for(int i=1; i<_indexToTimerPair.length; i++){
LPNTransitionPair ltPair = _indexToTimerPair[i];
// If the isTimer value is true, then this pair represents a timer.
//if(ltPair.get_isTimer()){
// If this is an LPNTransitionPair and not an LPNContinuousPair
if(!(ltPair instanceof LPNContinuousPair)){
// Get the LPN that this pairing references and find the name of the
// transition whose index is given by this pairing.
transitionNames.add(_lpnList[ltPair.get_lpnIndex()]
.getTransition(ltPair.get_transitionIndex()).getLabel());
}
}
return transitionNames.toArray(new String[0]);
}
/**
* Initializes the _lpnList using information from the local states.
* @param localStates
* The local states.
*/
private void initialize_lpnList(State[] localStates){
// Create the LPN list.
_lpnList = new LhpnFile[localStates.length];
// Get the LPNs.
for(int i=0; i<localStates.length; i++){
_lpnList[i] = localStates[i].getLpn();
}
}
/**
* Initializes the _indexToTimerPair from the local states. This includes
* adding the zero timer, the continuous variables and the set of
* enabled timers.
* @param localStates
* The local states.
*/
private void initialize_indexToTimerPair(State[] localStates){
/*
* The populating of the _indexToTimerPair is done in three stages.
* The first is to add the zero timer which is at the beginning of the zone.
* The second is to add the continuous variables. And the third is to add
* the other timers. Since the continuous variables are added before the
* timers and the variables and timers are added in the order of the LPNs,
* the elements in an accumulating list (enabledTransitionsArrayList) are
* already in order up to the elements added for a particular LPN. Thus the
* only sorting that needs to take place is the sorting for a particular LPN.
* Correspondingly, elements are first found for an LPN and sorted, then added
* to the main list.
*/
// This method will also initialize the _rateZeroContinuous
_rateZeroContinuous =
new DualHashMap<LPNContAndRate, VariableRangePair>();
// This list accumulates the transition pairs (ie timers) and the continuous
// variables.
ArrayList<LPNTransitionPair> enabledTransitionsArrayList =
new ArrayList<LPNTransitionPair>();
// Put in the zero timer.
// enabledTransitionsArrayList
// .add(new LPNTransitionPair(LPNTransitionPair.ZERO_TIMER, -1));
enabledTransitionsArrayList
.add(new LPNTransitionPair(LPNTransitionPair.ZERO_TIMER,
LPNTransitionPair.ZERO_TIMER));
// Get the continuous variables.
for(int i=0; i<localStates.length; i++){
// Accumulates the changing continuous variables for a single LPN.
ArrayList<LPNTransitionPair> singleLPN =
new ArrayList<LPNTransitionPair>();
// Get the associated LPN.
LhpnFile lpn = localStates[i].getLpn();
// Get the continuous variables for this LPN.
String[] continuousVariables = lpn.getContVars();
// Get the variable, index map.
DualHashMap<String, Integer> variableIndex = lpn.getContinuousIndexMap();
// Find which have a nonzero rate.
for(int j=0; j<continuousVariables.length; j++){
// Get the Variables with this name.
Variable contVar = lpn.getVariable(continuousVariables[j]);
// Get the rate.
//int rate = (int) Double.parseDouble(contVar.getInitRate());
IntervalPair rate = parseRate(contVar.getInitRate());
// Get the LPN index for the variable
int lpnIndex = lpn.getLpnIndex();
// Get the index as a variable for the LPN.
int contVariableIndex = variableIndex.get(continuousVariables[j]);
// LPNTransitionPair newPair =
// new LPNTransitionPair(lpnIndex, contVariableIndex, false);
LPNContinuousPair newPair =
new LPNContinuousPair(lpnIndex, contVariableIndex,
rate.getSmallestRate());
// If the rate is non-zero, then the variables needs to be tracked
// by matrix part of the Zone.
// if(!rate.equals(new IntervalPair(0,0))){
if(newPair.getCurrentRate() != 0){
// Temporary exception guaranteeing only unit rates.
//if(rate != -1 && rate != 1){
// if(rate.get_LowerBound() != 1 && rate.get_UpperBound() != 1){
// "only supports positive unit rates. The variable " + contVar +
// " has a rate of " + rate);
// // Get the LPN index for the variable
// int lpnIndex = lpn.getLpnIndex();
// // Get the index as a variable for the LPN. This index matches
// // the index in the vector stored by platu.State.
// int contVariableIndex = variableIndex.get(continuousVariables[j]);
// The continuous variable reference.
// singleLPN.add(
// new LPNTransitionPair(lpnIndex, contVariableIndex, false));
singleLPN.add(newPair);
}
else{
// If the rate is zero, then the Zone keeps track of this variable
// in a list.
// _rateZeroContinuous.
// put(newPair, new VariableRangePair(contVar,
// parseRate(contVar.getInitValue())));
_rateZeroContinuous.
insert(new LPNContAndRate(newPair, rate),
new VariableRangePair(contVar,
parseRate(contVar.getInitValue())));
}
}
// Sort the list.
Collections.sort(singleLPN);
// Add the list to the total accumulating list.
for(int j=0; j<singleLPN.size(); j++){
enabledTransitionsArrayList.add(singleLPN.get(j));
}
}
// Get the transitions.
for(int i=0; i<localStates.length; i++){
// Extract the enabled transition vector.
boolean[] enabledTran = localStates[i].getTranVector();
// Accumulates the transition pairs for one LPN.
ArrayList<LPNTransitionPair> singleLPN = new ArrayList<LPNTransitionPair>();
// The index of the boolean value corresponds to the index of the Transition.
for(int j=0; j<enabledTran.length; j++){
if(enabledTran[j]){
// Add the transition pair.
// singleLPN.add(new LPNTransitionPair(i, j, true));
singleLPN.add(new LPNTransitionPair(i, j));
}
}
// Sort the transitions for the current LPN.
Collections.sort(singleLPN);
// Add the collection to the enabledTransitionsArrayList
for(int j=0; j<singleLPN.size(); j++){
enabledTransitionsArrayList.add(singleLPN.get(j));
}
}
// Extract out the array portion of the enabledTransitionsArrayList.
_indexToTimerPair = enabledTransitionsArrayList.toArray(new LPNTransitionPair[0]);
}
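// Illustrative note (added for clarity, derived from the three-stage population
// described above): for two LPNs, each with one nonzero-rate continuous variable
// and one enabled transition, _indexToTimerPair ends up ordered as
//   [zero timer, cont(LPN0), cont(LPN1), tran(LPN0), tran(LPN1)]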
/**
* Sets the lower and upper bounds for the transitions and continuous variables.
* @param varNames
* The names of the transitions in _indexToTimerPair.
* @param localStates
* The current local states, used to evaluate the delay expressions.
*/
private void initializeLowerUpperBounds(String[] varNames, State[] localStates){
// Traverse the entire length of the DBM sub-matrix except the zero row/column.
// This is the same length as the _indexToTimerPair.length-1. The DBM is used to
// match the idea of setting the value for each row.
for(int i=1; i<dbmSize(); i++){
// Get the current LPN and transition pairing.
LPNTransitionPair ltPair = _indexToTimerPair[i];
//int upper, lower;
IntervalPair range;
// if(!ltPair.get_isTimer()){
if(ltPair instanceof LPNContinuousPair){
// If the pairing represents a continuous variable, then the
// upper and lower bound are the initial value or infinity depending
// on whether the initial rate is positive or negative.
// If the value is a constant, then assign the upper and lower bounds
// to be constant. If the value is a range then assign the upper and
// lower bounds to be a range.
Variable v = _lpnList[ltPair.get_lpnIndex()]
.getContVar(ltPair.get_transitionIndex());
// int initialRate = (int) Double.parseDouble(v.getInitRate());
// upper = initialRate;
// lower = initialRate;
String rate = v.getInitRate();
// Parse the rate. Should be in the form of [x,y] where x
// and y are integers.
//IntervalPair range = parseRate(rate);
range = parseRate(rate);
// Set the upper and lower bound (in the matrix) for the
// continuous variables.
// TODO : Check if correct.
String contValue = v.getInitValue();
IntervalPair bound = parseRate(contValue);
// Set upper bound (DBM entry (0, x) where x is the index of the variable v).
setDbmEntryByPair(LPNTransitionPair.ZERO_TIMER_PAIR, ltPair, bound.get_UpperBound());
// Set lower bound (DBM entry (x, 0) where x is the index of the variable v).
setDbmEntryByPair(ltPair, LPNTransitionPair.ZERO_TIMER_PAIR, -1*bound.get_LowerBound());
// lower = range.get_LowerBound();
// upper = range.get_UpperBound();
}
else{
// Get the expression tree.
ExprTree delay = _lpnList[ltPair.get_lpnIndex()].getDelayTree(varNames[i]);
// Get the values of the variables for evaluating the ExprTree.
HashMap<String, String> varValues =
_lpnList[ltPair.get_lpnIndex()]
.getAllVarsWithValuesAsString(localStates[ltPair.get_lpnIndex()].getVariableVector());
// Set the upper and lower bound.
// Passing the zone as null since it should not be needed.
// if(delay.getOp().equals("uniform")){
// IntervalPair lowerRange = delay.getLeftChild()
// .evaluateExprBound(varValues, null);
// IntervalPair upperRange = delay.getRightChild()
// .evaluateExprBound(varValues, null);
// // The lower and upper bounds should evaluate to a single
// // value. Yell if they don't.
// if(!lowerRange.singleValue() || !upperRange.singleValue()){
// "the lower or the upper bound evaluated to a range " +
// "instead of a single value.");
// range = new IntervalPair(lowerRange.get_LowerBound(),
// upperRange.get_UpperBound());
// else{
// range = delay.evaluateExprBound(varValues, null);
range = delay.evaluateExprBound(varValues, this, null);
// int upper, lower;
// if(delay.getOp().equals("uniform"))
// ExprTree lowerDelay = delay.getLeftChild();
// ExprTree upperDelay = delay.getRightChild();
// lower = (int) lowerDelay.evaluateExpr(varValues);
// upper = (int) upperDelay.evaluateExpr(varValues);
// else
// lower = (int) delay.evaluateExpr(varValues);
// upper = lower;
}
// setLowerBoundbydbmIndex(i, lower);
// setUpperBoundbydbmIndex(i, upper);
setLowerBoundbydbmIndex(i, range.get_LowerBound());
setUpperBoundbydbmIndex(i, range.get_UpperBound());
}
}
/**
* Initialize the rows and columns for the continuous variables.
*/
private void initializeRowColumnContVar(){
/*
* TODO : Describe the idea behind the following algorithm.
*/
// for(int row=2; row<_indexToTimerPair.length; row++){
// // Note: row is indexing the row of the DBM matrix.
// LPNTransitionPair ltRowPair = _indexToTimerPair[row];
// if(ltRowPair.get_isTimer()){
// // If we reached the timers, stop.
// break;
// for(int col=1; col<row; col++){
// // Note: col is indexing the column of the DBM matrix.
// // The new (row, col) entry. The entry is given by col-row<= m_(row,col). Since
// // col <= m_(0,col) (its upper bound) and -row <= m_(row,0) (the negative of its lower
// // bound), the entry is given by col-row <= m(0,col) + m_(row,0) = m_(row,col);
// int rowCol = getDbmEntry(row,0) + getDbmEntry(0, col);
// // The new (col, row) entry.
// int colRow = getDbmEntry(col, 0) + getDbmEntry(0, row);
// setDbmEntry(row, col, rowCol);
// setDbmEntry(col, row, colRow);
// The only entries that do not need to be checked are the ones where both variables
// represent timers.
for(int row=2; row<_indexToTimerPair.length; row++){
// Note: row is indexing the row of the DBM matrix.
LPNTransitionPair ltRowPair = _indexToTimerPair[row];
// if(ltRowPair.get_isTimer()){
// // If we reached the timers, stop.
// break;
for(int col=1; col<row; col++){
// Note: col is indexing the column of the DBM matrix.
LPNTransitionPair ltColPair = _indexToTimerPair[col];
// If we've reached the part of the zone involving only timers, then break out
// of this row.
// if(ltRowPair.get_isTimer() && ltColPair.get_isTimer()){
if(!(ltRowPair instanceof LPNContinuousPair) &&
!(ltColPair instanceof LPNContinuousPair)){
break;
}
// The new (row, col) entry. The entry is given by col-row<= m_(row,col). Since
// col <= m_(0,col) (its upper bound) and -row <= m_(row,0) (the negative of its lower
// bound), the entry is given by col-row <= m(0,col) + m_(row,0) = m_(row,col);
int rowCol = getDbmEntry(row,0) + getDbmEntry(0, col);
// The new (col, row) entry.
int colRow = getDbmEntry(col, 0) + getDbmEntry(0, row);
setDbmEntry(row, col, rowCol);
setDbmEntry(col, row, colRow);
}
}
}
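// Worked example of the derivation above (added for clarity): if the variable in
// row 'row' has lower bound 2 (so getDbmEntry(row,0) == -2) and the variable in
// column 'col' has upper bound 5 (so getDbmEntry(0,col) == 5), then the derived
// constraint is col - row <= -2 + 5 = 3, i.e. setDbmEntry(row, col, 3).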
/**
* Zero argument constructor for use in methods that create Zones where the members
* variables will be set by the method.
*/
private Zone()
{
_matrix = new int[0][0];
_indexToTimerPair = new LPNTransitionPair[0];
_hashCode = -1;
_lpnList = new LhpnFile[0];
_rateZeroContinuous = new DualHashMap<LPNContAndRate, VariableRangePair>();
}
/**
* Gets the upper bound of a Transition from the zone.
* @param t
* The transition whose upper bound is wanted.
* @return
* The upper bound of Transition t.
*/
public int getUpperBoundbyTransition(Transition t)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
// LPNTransitionPair ltPair =
// new LPNTransitionPair(lpnIndex, transitionIndex, true);
LPNTransitionPair ltPair =
new LPNTransitionPair(lpnIndex, transitionIndex);
return getUpperBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair, ltPair));
}
/**
* Returns the upper bound of the continuous variable.
* @param contVar
* The name of the continuous variable of interest.
* @param lpn
* The LPN containing the variable.
* @return
* The (0, contVar) entry of the zone, that is, the maximum as recorded by
* the zone.
*/
public int getUpperBoundbyContinuousVariable(String contVar, LhpnFile lpn){
// TODO : Finish.
// // Determine whether the variable is in the zone or rate zero.
// RangeAndPairing indexAndRange = _rateZeroContinuous.getKey(var);
// // If a RangeAndPairing is returned, then get the information from here.
// if(indexAndRange != null){
// return indexAndRange.get_range().get_UpperBound();
// // If indexAndRange is null, then try to get the value from the zone.
// int i=-1;
// for(i=0; i<_indexToTimerPair.length; i++){
// if(_indexToTimerPair[i].equals(var)){
// break;
// if(i < 0){
// + "a non-rate zero continuous variable that was not found in the "
// + "zone.");
// return getUpperBoundbydbmIndex(i);
// Extract the necessary indices.
int lpnIndex = lpn.getLpnIndex();
//int contVarIndex = lpn.get
DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
int contIndex = variableIndecies.get(contVar);
		// Package the indices into an LPNContinuousPair to use as a lookup key.
// LPNTransitionPair index = new LPNTransitionPair(lpnIndex, contIndex, false);
// Note : setting the rate is not necessary here since this is only
// being used as an index.
// LPNContinuousPair index = new LPNContinuousPair(lpnIndex, contIndex, 0);
LPNContinuousPair index = new LPNContinuousPair(lpnIndex, contIndex);
//Search for the continuous variable in the rate zero variables.
VariableRangePair pairing = _rateZeroContinuous.get(index);
		// If pairing is not null, the variable was found, so return the result.
if(pairing != null){
return pairing.get_range().get_UpperBound();
}
// If Pairing was null, the variable was not found. Search for the variable
// in the zone portion.
int i = Arrays.binarySearch(_indexToTimerPair, index);
// If i < 0, the search was unsuccessful, so scream.
if(i < 0){
throw new IllegalArgumentException("Atempted to find the lower bound for "
+ "a non-rate zero continuous variable that was not found in the "
+ "zone.");
}
//return getUpperBoundbydbmIndex(i);
return getDbmEntry(0, i);
}
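	/**
	 * Gets the rate used as the upper bound for a continuous variable's rate.
	 * In the current implementation this is the variable's current rate when
	 * the variable is in the zone; otherwise the rate is assumed to be zero.
	 * @param contVar
	 * 		The index of the continuous variable.
	 * @return
	 * 		The current rate if the variable is in the zone, zero otherwise.
	 */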
public int getUpperBoundForRate(LPNTransitionPair contVar){
// TODO : finish.
// Check if the contVar is in the zone.
int i = Arrays.binarySearch(_indexToTimerPair, contVar);
if(i > 0){
// The continuous variable is in the zone.
// The upper and lower bounds are stored in the same
// place as the delays, so the same method of
// retrieval will work.
//return getUpperBoundbydbmIndex(contVar.get_transitionIndex());
// Grab the current rate from the LPNContinuousPair.
return ((LPNContinuousPair)_indexToTimerPair[i]).getCurrentRate();
}
		// Assume the rate is zero. This covers the case where contVar
		// is in the rate zero variables as well as when it is not in the state at all.
return 0;
}
/**
	 * Get the value of the upper bound for the delay if the index refers
	 * to a timer; otherwise get the upper bound for the continuous
	 * variable's rate.
* @param index
* The timer's row/column of the DBM matrix.
* @return
* The upper bound on the transitions delay.
*/
public int getUpperBoundbydbmIndex(int index)
{
return _matrix[0][dbmIndexToMatrixIndex(index)];
}
/**
* Set the value of the upper bound for the delay.
* @param t
* The transition whose upper bound is being set.
* @param value
* The value of the upper bound.
*/
public void setUpperBoundbyTransition(Transition t, int value)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
// LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex);
setUpperBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair, ltPair), value);
}
/**
* Set the value of the upper bound for the delay.
* @param index
* The timer's row/column of the DBM matrix.
* @param value
* The value of the upper bound.
*/
public void setUpperBoundbydbmIndex(int index, int value)
{
_matrix[0][dbmIndexToMatrixIndex(index)] = value;
}
/**
* Sets the upper bound for a transition described by an LPNTransitionPair.
* @param ltPair
* The index of the transition and the index of the associated LPN for
* the timer to set the upper bound.
* @param value
* The value for setting the upper bound.
*/
private void setUpperBoundByLPNTransitionPair(LPNTransitionPair ltPair, int value){
setUpperBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair, ltPair), value);
}
/**
* Gets the lower bound of a Transition from the zone.
* @param t
	 * 		The transition whose lower bound is wanted.
* @return
* The lower bound of Transition t.
*/
public int getLowerBoundbyTransition(Transition t)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
// LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex);
return -1*getLowerBoundbydbmIndex(
Arrays.binarySearch(_indexToTimerPair, ltPair));
}
/**
* Returns the lower bound of the continuous variable.
	 * @param contVar
	 * 		The variable of interest.
	 * @param lpn
	 * 		The LhpnFile object that contains the variable.
	 * @return
	 * 		The (var,0) entry of the zone, that is, the minimum as recorded by
	 * 		the zone.
*/
public int getLowerBoundbyContinuousVariable(String contVar, LhpnFile lpn){
		// Extract the necessary indices.
int lpnIndex = lpn.getLpnIndex();
//int contVarIndex = lpn.get
DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
int contIndex = variableIndecies.get(contVar);
		// Package the indices into an LPNContinuousPair to use as a lookup key.
// LPNTransitionPair index = new LPNTransitionPair(lpnIndex, contIndex, false);
// Note: Setting the rate is not necessary since this is only being used
// as an index.
// LPNContinuousPair index = new LPNContinuousPair(lpnIndex, contIndex, 0);
LPNContinuousPair index = new LPNContinuousPair(lpnIndex, contIndex);
//Search for the continuous variable in the rate zero variables.
VariableRangePair pairing = _rateZeroContinuous.get(index);
		// If pairing is not null, the variable was found, so return the result.
if(pairing != null){
return pairing.get_range().get_LowerBound();
}
// If Pairing was null, the variable was not found. Search for the variable
// in the zone portion.
int i = Arrays.binarySearch(_indexToTimerPair, index);
// If i < 0, the search was unsuccessful, so scream.
if(i < 0){
throw new IllegalArgumentException("Atempted to find the lower bound for "
+ "a non-rate zero continuous variable that was not found in the "
+ "zone.");
}
//return getLowerBoundbydbmIndex(i);
return getDbmEntry(i, 0);
}
/**
	 * Get the value of the lower bound for the delay if the index refers
	 * to a timer; otherwise get the lower bound for the continuous variable's
	 * rate.
* @param index
* The timer's row/column of the DBM matrix.
* @return
* The value of the lower bound.
*/
public int getLowerBoundbydbmIndex(int index)
{
return _matrix[dbmIndexToMatrixIndex(index)][0];
}
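	/**
	 * Gets the lower bound for a continuous variable's rate as stored in the
	 * zone. If the variable is not in the zone (it is rate zero or not in the
	 * state at all), the rate is assumed to be zero.
	 * @param contVar
	 * 		The index of the continuous variable.
	 * @return
	 * 		The stored lower bound information for the rate if the variable is
	 * 		in the zone, zero otherwise.
	 */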
public int getLowerBoundForRate(LPNTransitionPair contVar){
// TODO : finish.
// Check if the contVar is in the zone.
int i = Arrays.binarySearch(_indexToTimerPair, contVar);
if(i > 0){
// The continuous variable is in the zone.
// The upper and lower bounds are stored in the same
// place as the delays, so the same method of
// retrieval will work.
			// Use the DBM index found by the binary search above.
			return getLowerBoundbydbmIndex(i);
}
		// Assume the rate is zero. This covers the case where contVar
		// is in the rate zero variables as well as when it is not in the state at all.
return 0;
}
/**
* Set the value of the lower bound for the delay.
* @param t
* The transition whose lower bound is being set.
* @param value
* The value of the lower bound.
*/
public void setLowerBoundbyTransition(Transition t, int value)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
// LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex);
setLowerBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair,ltPair), value);
}
/**
* Set the value of the lower bound for the delay.
	 * @param ltPair
	 * 		The index of the transition and the index of the associated LPN for
	 * 		the timer whose lower bound is being set.
	 * @param value
	 * 		The value of the lower bound.
*/
private void setLowerBoundByLPNTransitionPair(LPNTransitionPair ltPair, int value){
setLowerBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair,ltPair), value);
}
/**
* Set the value of the lower bound for the delay.
* @param index
* The timer's row/column of the DBM matrix.
* @param value
* The value of the lower bound.
*/
public void setLowerBoundbydbmIndex(int index, int value)
{
_matrix[dbmIndexToMatrixIndex(index)][0] = -1*value;
}
/**
* Give the upper and lower bounds for a continuous variable.
	 * @param contVar
	 * 		The variable of interest.
	 * @param lpn
	 * 		The LhpnFile object that contains the variable.
* @return
* The upper and lower bounds according to the Zone.
*/
public IntervalPair getContinuousBounds(String contVar, LhpnFile lpn){
/*
		 * Need to determine whether this is supposed to be a rate zero variable or a non-zero
* rate variable. One method is to check the rate of the passed variable. The other is
* to just check if the variable is present in either place.
*/
		// Extract the necessary indices.
int lpnIndex = lpn.getLpnIndex();
// Get the index of the continuous variable.
DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
int contIndex = variableIndecies.get(contVar);
		// Package the indices into an LPNContinuousPair to use as a lookup key.
// LPNTransitionPair index = new LPNTransitionPair(lpnIndex, contIndex, false);
// Note: setting the current rate is not necessary here since the
// LPNContinuousPair is only being used as an index.
// LPNContinuousPair index = new LPNContinuousPair(lpnIndex, contIndex, 0);
LPNContinuousPair index = new LPNContinuousPair(lpnIndex, contIndex);
// Search for the continuous variable in the rate zero variables.
VariableRangePair pairing = _rateZeroContinuous
.get(new LPNContAndRate(index, new IntervalPair(0,0)));
		// If pairing is not null, the variable was found, so return the result.
if(pairing != null){
return pairing.get_range();
}
// If Pairing was null, the variable was not found. Search for the variable
// in the zone portion.
int i = Arrays.binarySearch(_indexToTimerPair, index);
// If i < 0, the search was unsuccessful, so scream.
if(i < 0){
throw new IllegalArgumentException("Atempted to find the bounds for "
+ "a non-rate zero continuous variable that was not found in the "
+ "zone.");
}
// Else find the upper and lower bounds.
// int lower = getLowerBoundbydbmIndex(i);
// int upper = getUpperBoundbydbmIndex(i);
int lower = (-1)*getDbmEntry(i, 0);
int upper = getDbmEntry(0, i);
return new IntervalPair(lower, upper);
}
/**
	 * Gets the range for the continuous variable. Values come back
	 * unwarped, that is, as real variable values rather than the warped
	 * values stored in the DBM.
* @param ltContPair
* The index of the variable of interest.
* @return
* The range of the continuous variable described by ltContPair.
*/
public IntervalPair getContinuousBounds(LPNContinuousPair ltContPair){
// First check in the zone.
int variableIndex = Arrays.binarySearch(_indexToTimerPair, ltContPair);
if(variableIndex < 0){
			// The variable was not found in the zone. Check to see if it's
			// in the rate-zero variables. Technically this returns whatever
			// is in the _rateZeroContinuous, null or not.
// return _rateZeroContinuous.get(ltContPair).get_range();
// First get an object to reference into the _rateZeroContinuous
LPNContAndRate lcr = new LPNContAndRate(ltContPair,
new IntervalPair(0,0));
return _rateZeroContinuous.get(lcr).get_range();
}
		// The variable was found in the zone.
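		// The DBM stores warped values (the real value divided by the current
		// rate, as with the ContinuousUtilities.chkDiv calls used when values
		// are assigned), so the entries are multiplied by the current rate
		// here to recover the real bounds.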
int lower = (-1)*getDbmEntry(variableIndex, 0)
*getCurrentRate(ltContPair);
int upper = getDbmEntry(0, variableIndex)
*getCurrentRate(ltContPair);
return new IntervalPair(lower, upper);
}
/**
* Gets the range of the rate associated with a continuous variable.
	 * @param ltPair
	 * 		The index of the continuous variable.
	 * @return
	 * 		The range of rates associated with the continuous variable indexed
	 * 		by ltPair.
*/
public IntervalPair getRateBounds(LPNTransitionPair ltPair){
int upper;
int lower;
// Check if the ltContpair is in the zone.
int i = Arrays.binarySearch(_indexToTimerPair, ltPair);
if(i < 0){
// Then the variable is in the rate zero continuous
// variables so get the range of rates from there.
// return new IntervalPair(0,0);
// Create an object to reference into the rate zero.
LPNContAndRate lcr =
new LPNContAndRate((LPNContinuousPair) ltPair,
new IntervalPair(0,0));
// Get the old version of lcr from the rate zero since
// that contains the rate. This is quite a hack.
VariableRangePair vrp = _rateZeroContinuous.get(lcr);
lcr = _rateZeroContinuous.getKey(vrp);
return lcr.get_rateInterval();
}
upper = getUpperBoundbydbmIndex(i);
lower = -1*getLowerBoundbydbmIndex(i);
// The continuous variable is in the zone.
// The upper and lower bounds are stored in the same
// place as the delays, so the same method of
// retrieval will work.
return new IntervalPair(lower, upper);
}
/**
	 * Gets the lowest rate in absolute value.
	 * @param ltPair
	 * 		The index of the continuous variable.
	 * @return
	 * 		Zero if zero is an allowed rate or the variable is not in the zone;
	 * 		otherwise the allowed rate with the smallest absolute value.
*/
public int getSmallestRate(LPNTransitionPair ltPair){
int upper;
int lower;
// Check if the ltContpair is in the zone.
int i = Arrays.binarySearch(_indexToTimerPair, ltPair);
if(i < 0){
			// Assume the rate is zero. This covers the case where ltPair
			// is in the rate zero variables as well as when it is not in the state at all.
return 0;
}
upper = getUpperBoundbydbmIndex(i);
lower = -1*getLowerBoundbydbmIndex(i);
// If zero is a possible rate, then it is the rate to set to.
if( lower < 0 && upper > 0){
return 0;
}
// When zero is not present, use the smallest rate in absolute value.
return Math.abs(lower)<Math.abs(upper) ?
lower: upper;
}
/**
	 * Gets the lowest rate in absolute value.
	 * @param dbmIndex
	 * 		The DBM index of the continuous variable.
	 * @return
	 * 		Zero if zero is an allowed rate; otherwise the allowed rate with
	 * 		the smallest absolute value.
*/
public int getSmallestRate(int dbmIndex){
int lower = -1*getLowerBoundbydbmIndex(dbmIndex);
int upper = getUpperBoundbydbmIndex(dbmIndex);
if(lower < 0 && upper > 0){
return 0;
}
return Math.abs(lower)<= Math.abs(upper) ? lower : upper;
}
/**
* Sets the bounds for a continuous variable.
* @param contVar
* The continuous variable to set the bounds on.
* @param lpn
* The LhpnFile object that contains the variable.
* @param range
* The new range of the continuous variable.
*/
public void setContinuousBounds(String contVar, LhpnFile lpn,
IntervalPair range){
		// Extract the necessary indices.
int lpnIndex = lpn.getLpnIndex();
// Get the index of the continuous variable.
DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
int contIndex = variableIndecies.get(contVar);
		// Package the indices into an LPNContinuousPair to use as a lookup key.
// LPNTransitionPair index = new LPNTransitionPair(lpnIndex, contIndex, false);
		// Note : Setting the rate is not necessary since this is only being used
// as an index.
// LPNContinuousPair index = new LPNContinuousPair(lpnIndex, contIndex, 0);
LPNContinuousPair index = new LPNContinuousPair(lpnIndex, contIndex);
// Search for the continuous variable in the rate zero variables.
VariableRangePair pairing = _rateZeroContinuous.get(index);
		// If pairing is not null, the variable was found, so make the new assignment.
if(pairing != null){
pairing.set_range(range);
return;
}
// If Pairing was null, the variable was not found. Search for the variable
// in the zone portion.
int i = Arrays.binarySearch(_indexToTimerPair, index);
// If i < 0, the search was unsuccessful, so scream.
if(i < 0){
throw new IllegalArgumentException("Atempted to find the bounds for "
+ "a non-rate zero continuous variable that was not found in the "
+ "zone.");
}
// Else find the upper and lower bounds.
// setLowerBoundbydbmIndex(i, range.get_LowerBound());
// setUpperBoundbydbmIndex(i, range.get_UpperBound());
setDbmEntry(i, 0, (-1)*range.get_LowerBound());
setDbmEntry(0, i, range.get_UpperBound());
}
/**
* Converts the index of the DBM to the index of _matrix.
* @param i
* The row/column index of the DBM.
* @return
* The row/column index of _matrix.
*/
private int dbmIndexToMatrixIndex(int i)
{
return i+1;
}
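	// Note on the layout of _matrix: row and column 0 hold the upper and lower
	// bound information (see getUpperBoundbydbmIndex and getLowerBoundbydbmIndex),
	// while the DBM itself occupies rows and columns 1 and up. For example,
	// DBM entry (0, 3) is stored at _matrix[1][4].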
/**
* Retrieves an entry of the DBM using the DBM's addressing.
* @param i
* The row of the DBM.
* @param j
* The column of the DBM.
* @return
* The value of the (i, j) element of the DBM.
*/
public int getDbmEntry(int i, int j)
{
return _matrix[dbmIndexToMatrixIndex(i)][dbmIndexToMatrixIndex(j)];
}
/**
* Retrieves an entry of the DBM using LPNTransitionPair indecies.
* @param iPair
* The LPNTransitionPair for the ith entry.
* @param jPair
* The LPNTransitionPair for the jth entry.
* @return
* The value of the (i,j) element of the DBM where i corresponds to the row
* for the variable iPair and j corresponds to the row for the variable jPair.
*/
public int getDbmEntryByPair(LPNTransitionPair iPair, LPNTransitionPair jPair){
int iIndex = Arrays.binarySearch(_indexToTimerPair, iPair);
int jIndex = Arrays.binarySearch(_indexToTimerPair, jPair);
return getDbmEntry(iIndex, jIndex);
}
/**
* Sets an entry of the DBM using the DBM's addressing.
* @param i
* The row of the DBM.
* @param j
* The column of the DBM.
* @param value
* The new value for the entry.
*/
private void setDbmEntry(int i, int j, int value)
{
_matrix[dbmIndexToMatrixIndex(i)][dbmIndexToMatrixIndex(j)] = value;
}
/**
* Sets the entry in the DBM using the LPNTransitionPair indexing.
* @param row
* The LPNTransitionPair for the row.
* @param col
* The LPNTransitionPair for the column.
* @param value
* The value to set the entry to.
*/
private void setDbmEntryByPair(LPNTransitionPair row, LPNTransitionPair col, int value){
// The row index.
int i = timerIndexToDBMIndex(row);
// The column index.
int j = timerIndexToDBMIndex(col);
setDbmEntry(i, j, value);
}
/**
* Returns the index of the the transition in the DBM given a LPNTransitionPair pairing
* the transition index and associated LPN index.
* @param ltPair
* The pairing comprising the index of the transition and the index of the associated
* LPN.
* @return
* The row/column of the DBM associated with the ltPair.
*/
public int timerIndexToDBMIndex(LPNTransitionPair ltPair)
{
return Arrays.binarySearch(_indexToTimerPair, ltPair);
}
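	// Note: as with Arrays.binarySearch, a negative return value indicates
	// that ltPair is not indexed by this zone.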
	/**
	 * Returns a string representation of the zone. Timers are labeled 'ti',
	 * where i is the transition index associated with the timer.
	 */
public String toString()
{
// TODO : Fix the handling of continuous variables in the
		// _lpnList.length == 0 case.
String result = "Timer and delay or continuous and ranges.\n";
int count = 0;
// Print the timers.
for(int i=1; i<_indexToTimerPair.length; i++, count++)
{
if(_lpnList.length == 0)
{
// If no LPN's are associated with this Zone, use the index of the timer.
result += " t" + _indexToTimerPair[i].get_transitionIndex() + " : ";
}
else
{
String name;
// If the current LPNTransitionPair is a timer, get the name
// from the transitions.
// if(_indexToTimerPair[i].get_isTimer()){
// If the current timer is an LPNTransitionPair and not an LPNContinuousPair
if(!(_indexToTimerPair[i] instanceof LPNContinuousPair)){
// Get the name of the transition.
Transition tran = _lpnList[_indexToTimerPair[i].get_lpnIndex()].
getTransition(_indexToTimerPair[i].get_transitionIndex());
name = tran.getLabel();
}
else{
// If the current LPNTransitionPair is not a timer, get the
// name as a continuous variable.
Variable var = _lpnList[_indexToTimerPair[i].get_lpnIndex()]
.getContVar(_indexToTimerPair[i].get_transitionIndex());
LPNContinuousPair lcPair =
(LPNContinuousPair) _indexToTimerPair[i];
name = var.getName() +
":[" + -1*getDbmEntry(i, 0)*lcPair.getCurrentRate() + ","
+ getDbmEntry(0, i)*lcPair.getCurrentRate() + "]\n" +
" Current Rate: " + lcPair.getCurrentRate() + " " +
"rate:";
}
// result += " " + tran.getName() + ":";
result += " " + name + ":";
}
result += "[ " + -1*getLowerBoundbydbmIndex(i) + ", " + getUpperBoundbydbmIndex(i) + " ]";
if(count > 9)
{
result += "\n";
count = 0;
}
}
if(!_rateZeroContinuous.isEmpty()){
result += "\nRate Zero Continuous : \n";
for (LPNContAndRate lcrPair : _rateZeroContinuous.keySet()){
result += "" + _rateZeroContinuous.get(lcrPair)
+ "Rate: " + lcrPair.get_rateInterval();
}
}
result += "\nDBM\n";
// Print the DBM.
for(int i=0; i<_indexToTimerPair.length; i++)
{
result += "| " + String.format("%3d", getDbmEntry(i, 0));
for(int j=1; j<_indexToTimerPair.length; j++)
{
result += ", " + String.format("%3d",getDbmEntry(i, j));
}
result += " |\n";
}
return result;
}
/**
* Tests for equality. Overrides inherited equals method.
* @return True if o is equal to this object, false otherwise.
*/
public boolean equals(Object o)
{
// Check if the reference is null.
if(o == null)
{
return false;
}
// Check that the type is correct.
if(!(o instanceof Zone))
{
return false;
}
// Check for equality using the Zone equality.
return equals((Zone) o);
}
/**
* Tests for equality.
	 * @param otherZone
* The Zone to compare.
* @return
* True if the zones are non-null and equal, false otherwise.
*/
public boolean equals(Zone otherZone)
{
// Check if the reference is null first.
if(otherZone == null)
{
return false;
}
// Check for reference equality.
if(this == otherZone)
{
return true;
}
// If the hash codes are different, then the objects are not equal.
if(this.hashCode() != otherZone.hashCode())
{
return false;
}
		// Check if they have the same number of timers.
if(this._indexToTimerPair.length != otherZone._indexToTimerPair.length){
return false;
}
// Check if the timers are the same.
for(int i=0; i<this._indexToTimerPair.length; i++){
if(!(this._indexToTimerPair[i].equals(otherZone._indexToTimerPair[i]))){
return false;
}
}
// Check if the matrix is the same
for(int i=0; i<_matrix.length; i++)
{
for(int j=0; j<_matrix[0].length; j++)
{
if(!(this._matrix[i][j] == otherZone._matrix[i][j]))
{
return false;
}
}
}
return true;
}
/**
* Determines if this zone is a subset of Zone otherZone.
* @param otherZone
* The zone to compare against.
* @return
* True if this is a subset of other; false otherwise.
*/
public boolean subset(Zone otherZone){
// Check if the reference is null first.
if(otherZone == null)
{
return false;
}
// Check for reference equality.
if(this == otherZone)
{
return true;
}
		// Check if the same number of timers are present.
if(this._indexToTimerPair.length != otherZone._indexToTimerPair.length){
return false;
}
// Check if the transitions are the same.
for(int i=0; i<this._indexToTimerPair.length; i++){
if(!(this._indexToTimerPair[i].equals(otherZone._indexToTimerPair[i]))){
return false;
}
}
// Check if the entries of this Zone are less than or equal to the entries
// of the other Zone.
for(int i=0; i<_matrix.length; i++)
{
for(int j=0; j<_matrix[0].length; j++)
{
if(!(this._matrix[i][j] <= otherZone._matrix[i][j])){
return false;
}
}
}
return true;
}
/**
* Determines if this zone is a superset of Zone otherZone.
* @param otherZone
* The zone to compare against.
* @return
	 * 		True if this is a superset of otherZone; false otherwise. More specifically,
	 * 		it gives the result of otherZone.subset(this). Thus it agrees with the subset method.
*/
public boolean superset(Zone otherZone){
return otherZone.subset(this);
}
/**
* Overrides the hashCode.
*/
public int hashCode()
{
// Check if the hash code has been set.
if(_hashCode <0)
{
_hashCode = createHashCode();
}
return _hashCode;
}
/**
* Creates a hash code for a Zone object.
* @return
* The hash code.
*/
private int createHashCode()
{
int newHashCode = Arrays.hashCode(_indexToTimerPair);
for(int i=0; i<_matrix.length; i++)
{
newHashCode ^= Arrays.hashCode(_matrix[i]);
}
return Math.abs(newHashCode);
}
/**
	 * The size of the DBM sub matrix. This is calculated using the size of _indexToTimerPair.
* @return
* The size of the DBM.
*/
private int dbmSize()
{
return _indexToTimerPair.length;
}
/**
* The size of the matrix.
* @return
	 * 		The size of the matrix. This is calculated using the size of _indexToTimerPair.
*/
private int matrixSize()
{
return _indexToTimerPair.length + 1;
}
/**
	 * Performs Floyd's all-pairs shortest path algorithm to recanonicalize (tighten) the DBM.
*/
public void recononicalize()
{
for(int k=0; k<dbmSize(); k++)
{
for (int i=0; i<dbmSize(); i++)
{
for(int j=0; j<dbmSize(); j++)
{
if(getDbmEntry(i, k) != INFINITY && getDbmEntry(k, j) != INFINITY
&& getDbmEntry(i, j) > getDbmEntry(i, k) + getDbmEntry(k, j))
{
setDbmEntry(i, j, getDbmEntry(i, k) + getDbmEntry(k, j));
}
if( (i==j) && getDbmEntry(i, j) != 0)
{
throw new DiagonalNonZeroException("Entry (" + i + ", " + j + ")" +
" became " + getDbmEntry(i, j) + ".");
}
}
}
}
}
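	// Example of the tightening performed above (illustrative values only): if
	// getDbmEntry(i, k) is 2 and getDbmEntry(k, j) is 3 while getDbmEntry(i, j)
	// is 7, the (i, j) entry is reduced to 2 + 3 = 5, since
	// x_j - x_i <= (x_k - x_i) + (x_j - x_k).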
/**
	 * Determines if a timer associated with a given transition has reached its lower bound.
* @param t
* The transition to consider.
* @return
* True if the timer has reached its lower bound, false otherwise.
*/
public boolean exceedsLowerBoundbyTransitionIndex(Transition t)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
// LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex);
return exceedsLowerBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair, ltPair));
}
/**
* Determines if a timer has reached its lower bound.
	 * @param index
	 * 		The timer's DBM index.
* @return
* True if the timer has reached its lower bound, false otherwise.
*/
public boolean exceedsLowerBoundbydbmIndex(int index)
{
// Note : Make sure that the lower bound is stored as a negative number
// and that the inequality is correct.
return _matrix[0][dbmIndexToMatrixIndex(index)] <=
_matrix[1][dbmIndexToMatrixIndex(index)];
}
/* (non-Javadoc)
* @see verification.timed_state_exploration.zone.Zone#fireTransitionbyTransitionIndex(int, int[], verification.platu.stategraph.State)
*/
// public Zone fireTransitionbyTransitionIndex(int timer, int[] enabledTimers,
// State state)
// // TODO: Check if finish.
// int index = Arrays.binarySearch(_indexToTimer, timer);
// //return fireTransitionbydbmIndex(Arrays.binarySearch(_indexToTimer, timer),
// //enabledTimers, state);
// // Check if the value is in this zone to fire.
// if(index < 0){
// return this;
// return fireTransitionbydbmIndex(index, enabledTimers, state);
/**
	 * Gives the Zone obtained by firing a given Transition.
	 * @param t
	 * 		The transition being fired.
	 * @param enabledTran
	 * 		The list of currently enabled Transitions.
	 * @param newAssignValues
	 * 		The records of continuous variable rate and value assignments made
	 * 		by the firing.
	 * @param localStates
	 * 		The current local states.
	 * @return
	 * 		The Zone obtained by firing Transition t with enabled Transitions
	 * 		enabledTran when the current state is localStates.
*/
// public Zone fire(Transition t, LpnTranList enabledTran, ArrayList<HashMap<LPNContAndRate, IntervalPair>> newAssignValues,
// State[] localStates){
// public Zone fire(Transition t, LpnTranList enabledTran,
// ArrayList<UpdateContinuous> newAssignValues,
// State[] localStates){
public Zone fire(Transition t, LpnTranList enabledTran,
ContinuousRecordSet newAssignValues,
State[] localStates){
try {
if(_writeLogFile != null){
_writeLogFile.write(t.toString());
_writeLogFile.newLine();
}
} catch (IOException e) {
e.printStackTrace();
}
// Create the LPNTransitionPair to check if the Transitions is in the zone and to
// find the index.
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex);
int dbmIndex = Arrays.binarySearch(_indexToTimerPair, ltPair);
if(dbmIndex <= 0){
return this;
}
// Get the new zone portion.
Zone newZone = fireTransitionbydbmIndexNew(dbmIndex, enabledTran, localStates,
newAssignValues);
// Update any assigned continuous variables.
//newZone.updateContinuousAssignment(newAssignValues);
// Set all the rates to their lower bound.
newZone.setAllToLowerBoundRate();
// Warp the Zone
newZone.dbmWarp(this);
// Warping can wreck the newly assigned values so correct them.
newZone.correctNewAssignemnts(newAssignValues);
newZone.recononicalize();
newZone.advance(localStates);
// Recanonicalize
newZone.recononicalize();
newZone.checkZoneMaxSize();
return newZone;
}
/**
* Updates the Zone according to the transition firing.
	 * @param index
	 * 		The DBM index of the timer for the fired transition.
	 * @param enabledTimers
	 * 		The list of currently enabled transitions.
	 * @param localStates
	 * 		The current local states.
	 * @param newAssignValues
	 * 		The records of continuous variable assignments made by the firing.
	 * @return
* The updated Zone.
*/
public Zone fireTransitionbydbmIndex(int index, LpnTranList enabledTimers,
State[] localStates,
ArrayList<HashMap<LPNContAndRate, IntervalPair>> newAssignValues)
// public Zone fireTransitionbydbmIndex(int index, LpnTranList enabledTimers,
// State[] localStates,
//// ArrayList<UpdateContinuous> newAssignValues)
// public Zone fireTransitionbydbmIndex(int index, LpnTranList enabledTimers,
// State[] localStates,
// ContinuousRecordSet newAssignValues)
{
/*
* For the purpose of adding the newly enabled transitions and removing
		 * the disabled transitions, the continuous variables that still have
		 * a nonzero rate can be treated like still enabled timers.
*/
// Initialize the zone.
Zone newZone = new Zone();
		// These sets will differentiate between the new timers and the
		// old timers, that is, between the timers that are not already in the
		// zone and those that are already in the zone.
HashSet<LPNTransitionPair> newTimers = new HashSet<LPNTransitionPair>();
HashSet<LPNTransitionPair> oldTimers = new HashSet<LPNTransitionPair>();
// Copy the LPNs over.
newZone._lpnList = new LhpnFile[this._lpnList.length];
for(int i=0; i<this._lpnList.length; i++){
newZone._lpnList[i] = this._lpnList[i];
}
// copyRatesNew(newZone, enabledTimers, newAssignValues);
HashMap<LPNContAndRate, IntervalPair> oldNonZero = newAssignValues.get(3);
// Add the continuous variables to the enabled timers.
for(int i=1; _indexToTimerPair[i] instanceof LPNContinuousPair; i++){
			// For the purpose of adding continuous variables to the zone
// consider an oldNonZero continuous variable as new.
if(oldNonZero.containsKey(_indexToTimerPair[i])){
continue;
}
oldTimers.add(_indexToTimerPair[i]);
}
for(int i=0; i<newZone._indexToTimerPair.length; i++)
{
// Determine if each value is a new timer or old.
if(Arrays.binarySearch(this._indexToTimerPair, newZone._indexToTimerPair[i])
>= 0 )
{
// The timer was already present in the zone.
oldTimers.add(newZone._indexToTimerPair[i]);
}
else
{
// The timer is a new timer.
newTimers.add(newZone._indexToTimerPair[i]);
}
}
// Create the new matrix.
newZone._matrix = new int[newZone.matrixSize()][newZone.matrixSize()];
// TODO: For simplicity, make a copy of the current zone and perform the
// restriction and re-canonicalization. Later add a copy re-canonicalization
// that does the steps together.
Zone tempZone = this.clone();
tempZone.restrictTimer(index);
tempZone.recononicalize();
// Copy the tempZone to the new zone.
for(int i=0; i<tempZone.dbmSize(); i++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[i]))
{
continue;
}
			// Get the new index for the timer.
int newIndexi = i==0 ? 0 :
Arrays.binarySearch(newZone._indexToTimerPair, tempZone._indexToTimerPair[i]);
for(int j=0; j<tempZone.dbmSize(); j++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[j]))
{
continue;
}
int newIndexj = j==0 ? 0 :
Arrays.binarySearch(newZone._indexToTimerPair, tempZone._indexToTimerPair[j]);
newZone._matrix[newZone.dbmIndexToMatrixIndex(newIndexi)]
[newZone.dbmIndexToMatrixIndex(newIndexj)]
= tempZone.getDbmEntry(i, j);
}
}
// Copy the upper and lower bounds.
for(int i=1; i<tempZone.dbmSize(); i++)
{
			// The block copies the upper and lower bound information from the
			// old zone. Thus we do not consider anything that is not an old
			// timer. Furthermore, oldNonZero represents continuous variables
			// that were already rate non-zero, so their bounds are copied as well.
if(!oldTimers.contains(tempZone._indexToTimerPair[i])
&& !oldNonZero.containsKey(_indexToTimerPair[i]))
{
continue;
}
newZone.setLowerBoundByLPNTransitionPair(tempZone._indexToTimerPair[i],
-1*tempZone.getLowerBoundbydbmIndex(i));
// The minus sign is because _matrix stores the negative of the lower bound.
newZone.setUpperBoundByLPNTransitionPair(tempZone._indexToTimerPair[i],
tempZone.getUpperBoundbydbmIndex(i));
}
// Copy in the new relations for the new timers.
for(LPNTransitionPair timerNew : newTimers)
{
for(LPNTransitionPair timerOld : oldTimers)
{
newZone.setDbmEntry(newZone.timerIndexToDBMIndex(timerNew),
newZone.timerIndexToDBMIndex(timerOld),
tempZone.getDbmEntry(0, tempZone.timerIndexToDBMIndex(timerOld)));
newZone.setDbmEntry(newZone.timerIndexToDBMIndex(timerOld),
newZone.timerIndexToDBMIndex(timerNew),
tempZone.getDbmEntry(tempZone.timerIndexToDBMIndex(timerOld), 0));
}
}
// Set the upper and lower bounds for the new timers.
for(LPNTransitionPair pair : newTimers){
// Get all the upper and lower bounds for the new timers.
// Get the name for the timer in the i-th column/row of DBM
//String tranName = indexToTran.get(i).getName();
String tranName = _lpnList[pair.get_lpnIndex()]
.getTransition(pair.get_transitionIndex()).getLabel();
ExprTree delay = _lpnList[pair.get_lpnIndex()].getDelayTree(tranName);
// Get the values of the variables for evaluating the ExprTree.
HashMap<String, String> varValues =
_lpnList[pair.get_lpnIndex()]
.getAllVarsWithValuesAsString(localStates[pair.get_lpnIndex()].getVariableVector());
// Set the upper and lower bound.
int upper, lower;
if(delay.getOp().equals("uniform"))
{
IntervalPair lowerRange = delay.getLeftChild()
.evaluateExprBound(varValues, null, null);
IntervalPair upperRange = delay.getRightChild()
.evaluateExprBound(varValues, null, null);
// The lower and upper bounds should evaluate to a single
// value. Yell if they don't.
if(!lowerRange.singleValue() || !upperRange.singleValue()){
throw new IllegalStateException("When evaulating the delay, " +
"the lower or the upper bound evaluated to a range " +
"instead of a single value.");
}
lower = lowerRange.get_LowerBound();
upper = upperRange.get_UpperBound();
}
else
{
IntervalPair range = delay.evaluateExprBound(varValues, this, null);
lower = range.get_LowerBound();
upper = range.get_UpperBound();
}
newZone.setLowerBoundByLPNTransitionPair(pair, lower);
newZone.setUpperBoundByLPNTransitionPair(pair, upper);
}
//newZone.advance();
// Advance time.
// newZone.advance(localStates);
// Recanonicalize.
// newZone.recononicalize();
// newZone.checkZoneMaxSize();
return newZone;
}
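	/**
	 * Updates the Zone according to a transition firing. This version handles
	 * the continuous variable updates recorded in newAssignValues.
	 * @param index
	 * 		The DBM index of the timer for the fired transition.
	 * @param enabledTimers
	 * 		The list of currently enabled transitions.
	 * @param localStates
	 * 		The current local states.
	 * @param newAssignValues
	 * 		The records of rate and value assignments made by the firing.
	 * @return
	 * 		The updated Zone.
	 */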
public Zone fireTransitionbydbmIndexNew(int index, LpnTranList enabledTimers,
State[] localStates,
ContinuousRecordSet newAssignValues)
{
/*
* For the purpose of adding the newly enabled transitions and removing
		 * the disabled transitions, the continuous variables that still have
		 * a nonzero rate can be treated like still enabled timers.
*/
// Initialize the zone.
Zone newZone = new Zone();
		// These sets will differentiate between the new timers and the
		// old timers, that is, between the timers that are not already in the
		// zone and those that are already in the zone.
HashSet<LPNTransitionPair> newTimers = new HashSet<LPNTransitionPair>();
HashSet<LPNTransitionPair> oldTimers = new HashSet<LPNTransitionPair>();
// Copy the LPNs over.
newZone._lpnList = new LhpnFile[this._lpnList.length];
for(int i=0; i<this._lpnList.length; i++){
newZone._lpnList[i] = this._lpnList[i];
}
copyRatesNew(newZone, enabledTimers, newAssignValues);
// HashMap<LPNContAndRate, IntervalPair> oldNonZero = newAssignValues.get(3);
// Add the continuous variables to the enabled timers.
// for(int i=1; _indexToTimerPair[i] instanceof LPNContinuousPair; i++){
// // For the purpose of addigng continuous variables to the zone
// // consider an oldNonZero continuous variable as new.
// if(oldNonZero.containsKey(_indexToTimerPair[i])){
// continue;
// oldTimers.add(_indexToTimerPair[i]);
for(int i=0; i<newZone._indexToTimerPair.length; i++)
{
// Handle the continuous variables portion.
if(newZone._indexToTimerPair[i] instanceof LPNContinuousPair){
LPNContinuousPair lcPair =
(LPNContinuousPair) newZone._indexToTimerPair[i];
// Get the record
UpdateContinuous continuousState =
newAssignValues.get(lcPair);
if(continuousState != null && (continuousState.is_newValue() ||
continuousState.newlyNonZero())){
// In the first case a new value has been assigned, so
// consider the continuous variable a 'new' variable for
// the purposes of copying relations from the previous zone.
newTimers.add(newZone._indexToTimerPair[i]);
continue;
}
// At this point, either the continuous variable was not present
// in the newAssignValues or it is in the newAssignValues and
// satisfies the following: it already had a non-zero rate, is
// being assigned another non-zero rate, and is not being assigned
				// a new value. This is because the field _indexToTimerPair only
// deals with non-zero rates, so the variable must have a non-zero
// rate. Furthermore the if statement takes care of the cases
// when the rate changed from zero to non-zero and/or a new value
// has been assigned.
// In either of the cases, we consider the variable an 'old' variable
// for the purpose of copying the previous zone information.
oldTimers.add(newZone._indexToTimerPair[i]);
}
			// At this point, the variable represents a transition (timer).
// So determine whether this timer is new or old.
else if(Arrays.binarySearch(this._indexToTimerPair,
newZone._indexToTimerPair[i]) >= 0 )
{
// The timer was already present in the zone.
oldTimers.add(newZone._indexToTimerPair[i]);
}
else
{
// The timer is a new timer.
newTimers.add(newZone._indexToTimerPair[i]);
}
}
// Create the new matrix.
newZone._matrix = new int[newZone.matrixSize()][newZone.matrixSize()];
// TODO: For simplicity, make a copy of the current zone and perform the
// restriction and re-canonicalization. Later add a copy re-canonicalization
// that does the steps together.
Zone tempZone = this.clone();
tempZone.restrictTimer(index);
tempZone.recononicalize();
// Copy the tempZone to the new zone.
for(int i=0; i<tempZone.dbmSize(); i++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[i]))
{
continue;
}
			// Get the new index for the timer.
int newIndexi = i==0 ? 0 :
Arrays.binarySearch(newZone._indexToTimerPair,
tempZone._indexToTimerPair[i]);
for(int j=0; j<tempZone.dbmSize(); j++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[j]))
{
continue;
}
int newIndexj = j==0 ? 0 :
Arrays.binarySearch(newZone._indexToTimerPair,
tempZone._indexToTimerPair[j]);
newZone._matrix[newZone.dbmIndexToMatrixIndex(newIndexi)]
[newZone.dbmIndexToMatrixIndex(newIndexj)]
= tempZone.getDbmEntry(i, j);
}
}
// Copy the upper and lower bounds.
for(int i=1; i<tempZone.dbmSize(); i++)
{
// The block copies the upper and lower bound information from the
// old zone. Thus we do not consider anything that is not an old
// timer.
if(!oldTimers.contains(tempZone._indexToTimerPair[i]))
{
				// A hack to ensure that the newly zero variables
// get the new values from the tempZone.
if(tempZone._indexToTimerPair[i] instanceof LPNContinuousPair){
LPNContinuousPair lcPair =
(LPNContinuousPair) tempZone._indexToTimerPair[i];
VariableRangePair vrp = newZone._rateZeroContinuous
.get(new LPNContAndRate(lcPair));
if(vrp != null){
						// This means that the continuous variable was non-zero
// and is now zero. Fix up the values according to
// the temp zone.
IntervalPair newRange = tempZone.getContinuousBounds(lcPair);
vrp.set_range(newRange);
}
}
continue;
}
if(_indexToTimerPair[i] instanceof LPNContinuousPair){
LPNContinuousPair lcPair = (LPNContinuousPair) _indexToTimerPair[i];
				// Check if a rate assignment has occurred for any continuous
// variables.
UpdateContinuous updateRecord =
newAssignValues.get(lcPair);
if(updateRecord != null){
// Since the variable is in the oldTimers, it cannot have had
					// a new value assigned to it. It must have had a new rate assignment.
IntervalPair rates = updateRecord.get_lcrPair().get_rateInterval();
IntervalPair values = updateRecord.get_Value();
// Copy the new rate information
newZone.setLowerBoundByLPNTransitionPair(_indexToTimerPair[i],
rates.get_LowerBound());
newZone.setUpperBoundByLPNTransitionPair(_indexToTimerPair[i],
rates.get_UpperBound());
// Copy the smallest and greatest continuous value.
// newZone.setDbmEntryByPair(LPNTransitionPair.ZERO_TIMER_PAIR,
// _indexToTimerPair[i], -1*values.get_LowerBound());
// newZone.setDbmEntryByPair(_indexToTimerPair[i],
// LPNTransitionPair.ZERO_TIMER_PAIR, values.get_UpperBound());
continue;
}
}
newZone.setLowerBoundByLPNTransitionPair(tempZone._indexToTimerPair[i],
-1*tempZone.getLowerBoundbydbmIndex(i));
// The minus sign is because _matrix stores the negative of the lower bound.
newZone.setUpperBoundByLPNTransitionPair(tempZone._indexToTimerPair[i],
tempZone.getUpperBoundbydbmIndex(i));
}
// Copy in the new relations for the new timers.
for(LPNTransitionPair timerNew : newTimers)
{
for(LPNTransitionPair timerOld : oldTimers)
{
newZone.setDbmEntry(newZone.timerIndexToDBMIndex(timerNew),
newZone.timerIndexToDBMIndex(timerOld),
tempZone.getDbmEntry(0, tempZone.timerIndexToDBMIndex(timerOld)));
newZone.setDbmEntry(newZone.timerIndexToDBMIndex(timerOld),
newZone.timerIndexToDBMIndex(timerNew),
tempZone.getDbmEntry(tempZone.timerIndexToDBMIndex(timerOld), 0));
}
}
// Set the upper and lower bounds for the new timers.
for(LPNTransitionPair pair : newTimers){
// Handle continuous case
if(pair instanceof LPNContinuousPair){
LPNContinuousPair lcPair = (LPNContinuousPair) pair;
// If a continuous variable is in the newTimers, then an assignment
// to the variable must have occurred. So get the value.
UpdateContinuous updateRecord = newAssignValues.get(lcPair);
if(updateRecord == null){
throw new IllegalStateException("The pair " + pair
+ "was not in the new assigned values but was sorted as "
+ "a new value.");
}
IntervalPair rates = updateRecord.get_lcrPair().get_rateInterval();
IntervalPair values = updateRecord.get_Value();
newZone.setLowerBoundByLPNTransitionPair(lcPair,
rates.get_LowerBound());
newZone.setUpperBoundByLPNTransitionPair(lcPair,
rates.get_UpperBound());
// Get the current rate.
int currentRate = lcPair.getCurrentRate();
if(currentRate>= 0){
// // Copy the smallest and greatest continuous value.
// newZone.setDbmEntryByPair(LPNTransitionPair.ZERO_TIMER_PAIR,
// lcPair, -1*values.get_LowerBound());
// newZone.setDbmEntryByPair(lcPair,
// LPNTransitionPair.ZERO_TIMER_PAIR,
// values.get_UpperBound());
// Copy the smallest and greatest continuous value.
newZone.setDbmEntryByPair(lcPair,
LPNTransitionPair.ZERO_TIMER_PAIR,
ContinuousUtilities.chkDiv(-1*values.get_LowerBound(),
currentRate, true));
newZone.setDbmEntryByPair(LPNTransitionPair.ZERO_TIMER_PAIR,
lcPair,
ContinuousUtilities.chkDiv(values.get_UpperBound(),
currentRate, true));
}
else{
// Copy the smallest and greatest continuous value.
// For negative rates, the upper and lower bounds need
// to be switched.
newZone.setDbmEntryByPair(LPNTransitionPair.ZERO_TIMER_PAIR,
lcPair,
ContinuousUtilities.chkDiv(-1*values.get_LowerBound(),
currentRate, true));
newZone.setDbmEntryByPair(lcPair,
LPNTransitionPair.ZERO_TIMER_PAIR,
ContinuousUtilities.chkDiv(values.get_UpperBound(),
currentRate, true));
}
continue;
}
// Get all the upper and lower bounds for the new timers.
// Get the name for the timer in the i-th column/row of DBM
String tranName = _lpnList[pair.get_lpnIndex()]
.getTransition(pair.get_transitionIndex()).getLabel();
ExprTree delay = _lpnList[pair.get_lpnIndex()].getDelayTree(tranName);
// Get the values of the variables for evaluating the ExprTree.
HashMap<String, String> varValues =
_lpnList[pair.get_lpnIndex()]
.getAllVarsWithValuesAsString(localStates[pair.get_lpnIndex()].getVariableVector());
// Set the upper and lower bound.
int upper, lower;
if(delay.getOp().equals("uniform"))
{
IntervalPair lowerRange = delay.getLeftChild()
.evaluateExprBound(varValues, null, null);
IntervalPair upperRange = delay.getRightChild()
.evaluateExprBound(varValues, null, null);
// The lower and upper bounds should evaluate to a single
// value. Yell if they don't.
if(!lowerRange.singleValue() || !upperRange.singleValue()){
throw new IllegalStateException("When evaulating the delay, " +
"the lower or the upper bound evaluated to a range " +
"instead of a single value.");
}
lower = lowerRange.get_LowerBound();
upper = upperRange.get_UpperBound();
}
else
{
IntervalPair range = delay.evaluateExprBound(varValues, this, null);
lower = range.get_LowerBound();
upper = range.get_UpperBound();
}
newZone.setLowerBoundByLPNTransitionPair(pair, lower);
newZone.setUpperBoundByLPNTransitionPair(pair, upper);
}
//Erase relationships for continuous variables that have had new values
		// assigned to them or a newly non-zero rate.
for(int i = 1; i<newZone._indexToTimerPair.length &&
newZone._indexToTimerPair[i] instanceof LPNContinuousPair; i++){
LPNContinuousPair lcPair = (LPNContinuousPair) newZone._indexToTimerPair[i];
// Get the update variable.
UpdateContinuous update = newAssignValues.get(lcPair);
if(update != null && (update.is_newValue() || update.newlyNonZero())){
for(int j=1; j<newZone._indexToTimerPair.length; j++){
if (j==i){
continue;
}
else{
newZone.setDbmEntry(i, j, Zone.INFINITY);
newZone.setDbmEntry(j, i, Zone.INFINITY);
}
}
}
}
//newZone.advance();
// Advance time.
// newZone.advance(localStates);
// Recanonicalize.
// newZone.recononicalize();
// newZone.checkZoneMaxSize();
return newZone;
}
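	/**
	 * Corrects the values of continuous variables that have been assigned a
	 * new value or a newly non-zero rate, since warping can alter the newly
	 * assigned values. The upper and lower bound entries are rewritten from
	 * the assignment records and the relations to all other variables are
	 * reset to infinity.
	 * @param newAssignValues
	 * 		The records of rate and value assignments made by the firing.
	 */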
public void correctNewAssignemnts(ContinuousRecordSet newAssignValues){
//Erase relationships for continuous variables that have had new values
		// assigned to them or a newly non-zero rate.
for(int i = 1; i<this._indexToTimerPair.length &&
this._indexToTimerPair[i] instanceof LPNContinuousPair; i++){
LPNContinuousPair lcPair = (LPNContinuousPair) this._indexToTimerPair[i];
// Get the update variable.
UpdateContinuous update = newAssignValues.get(lcPair);
if(update != null && (update.is_newValue() || update.newlyNonZero())){
IntervalPair values = update.get_Value();
int currentRate = lcPair.getCurrentRate();
// Correct the upper and lower bounds.
if(lcPair.getCurrentRate()>0){
setDbmEntry(i, 0,
ContinuousUtilities.chkDiv(-1*values.get_LowerBound(),
currentRate, true));
setDbmEntry(0,i,
ContinuousUtilities.chkDiv(values.get_UpperBound(),
currentRate, true));
}
else{
setDbmEntry(i,0,
ContinuousUtilities.chkDiv(values.get_UpperBound(),
currentRate, true));
setDbmEntry(0, i,
ContinuousUtilities.chkDiv(-1*values.get_LowerBound(),
currentRate, true));
}
// Erase the relationships.
for(int j=1; j<this._indexToTimerPair.length; j++){
if (j==i){
continue;
}
else{
this.setDbmEntry(i, j, Zone.INFINITY);
this.setDbmEntry(j, i, Zone.INFINITY);
}
}
}
}
}
/**
* This fire method fires a rate change event.
*
* @param ltPair
* The index of the continuous variable whose rate needs to be changed.
* @param rate
* The new rate.
* @return
* The new zone resulting from the rate change.
*/
public Zone fire(LPNTransitionPair ltPair, int rate){
// Make a copy of the Zone.
Zone resultZone = this.clone();
// Change the current rate of the continuous variable.
setCurrentRate(ltPair, rate);
// Warp the zone.
resultZone.dbmWarp(this);
// Recanonicalize.
// resultZone.recononicalize();
// resultZone.checkZoneMaxSize();
return resultZone;
}
/**
* Handles the moving in and out of continuous variables.
* @param newContValues
*/
// private void copyRates(Zone newZone,
// HashMap<LPNContinuousPair, IntervalPair> newZeroContValues,
// HashMap<LPNContinuousPair, IntervalPair> newNonZeroContValues){
// newZone._rateZeroContinuous = new DualHashMap<LPNTransitionPair, VariableRangePair>();
// // Copy the zero rate variables over if they are still rate zero.
// for(Entry<LPNTransitionPair, VariableRangePair> entry : _rateZeroContinuous.entrySet()){
// LPNContinuousPair thePair = (LPNContinuousPair) entry.getKey();
// VariableRangePair rangeValue = entry.getValue();
// // Check if the pairing is in the newNonZeroContValues.
// IntervalPair interval = newNonZeroContValues.get(thePair);
// if(interval == null){
// // Interval being null indicates that the key was not
// // found.
// newZone._rateZeroContinuous.put(thePair, rangeValue);
/**
* Handles the moving of the continuous variables in and out of the
* _rateZeroContinuous. This includes the adding of all rate zero (new and old)
	 * continuous variables to the _rateZeroContinuous, and creating the
* _indexToTimerPair and populating it.
* @param newZone The Zone being constructed.
	 * @param enabledTran
	 * 		The list of enabled transitions.
* @param newAssignValues The list of continuous variable update information.
*/
// private void copyRates(Zone newZone, LpnTranList enabledTran,
// ArrayList<HashMap<LPNContAndRate, IntervalPair>> newAssignValues){
// private void copyRates(Zone newZone, LpnTranList enabledTran,
// ArrayList<UpdateContinuous> newAssignValues){
private void copyRates(Zone newZone, LpnTranList enabledTran,
ContinuousRecordSet newAssignValues){
/*
* The newAssignValues is an ArrayList of four sets.
* 0. Rate zero gets zero assigned.
* 1. Rate zero gets non-zero rate assigned.
* 2. Non-zero gets zero rate assigned.
* 3. Non-zero gets non-zero rate assigned.
*/
// final int OLD_ZERO = 0; // Case 0 in description.
// final int NEW_NON_ZERO = 1; // Case 1 in description.
// final int NEW_ZERO = 2; // Case 2 in description.
// final int OLD_NON_ZERO = 3; // Case 3 in description. Isn't used.
// HashMap<LPNContAndRate, IntervalPair> oldRateZero =
// newAssignValues.get(OLD_ZERO);
// HashMap<LPNContAndRate, IntervalPair> newNonZeroRate =
// newAssignValues.get(NEW_NON_ZERO);
// HashMap<LPNContAndRate, IntervalPair> newRateZero =
// newAssignValues.get(NEW_ZERO);
// HashMap<LPNContAndRate, IntervalPair> oldNonZero =
// newAssignValues.get(OLD_NON_ZERO);
// Create new rate zero member variable.
newZone._rateZeroContinuous = new DualHashMap<LPNContAndRate,
VariableRangePair>();
// Create new _indexToTimerPair.
// First get the total number of non-zero rate continuous variables that
// are present in the old zone.
int totalContinuous = 0;
for(int i=0; i<_lpnList.length; i++){
totalContinuous += _lpnList[i].getTotalNumberOfContVars();
}
int numberNonZero = totalContinuous - _rateZeroContinuous.size();
// The size is given by
// total number of transitions
// + number of non-zero rate continuous variables previously in the zone
// + number of zero rate continuous variables that now have non-zero
// - number of non-zero rate continuous variables that are now zero
// + 1 for the zero timer.
// int newSize = enabledTran.size()
// + numberNonZero + newAssignValues.get(NEW_NON_ZERO).size()
// - newAssignValues.get(NEW_ZERO).size() + 1;
// TODO: Create an object that stores the records along with this information.
int newNonZero = 0, newZero = 0;
for(UpdateContinuous record : newAssignValues.keySet()){
if(record.newlyNonZero()){
newNonZero++;
}
if(record.newlyZero()){
newZero++;
}
}
int newSize = enabledTran.size() + numberNonZero + newNonZero - newZero + 1;
// Create the timer array.
newZone._indexToTimerPair = new LPNTransitionPair[newSize];
// Add in the zero timer.
newZone._indexToTimerPair[0] = LPNTransitionPair.ZERO_TIMER_PAIR;
		// Copy over the rate zero continuous variables.
// First copy over all the continuous variables that still have
// rate zero.
// for(LPNTransitionPair ltTranPair : _rateZeroContinuous.keySet()){
// // Cast the index.
// LPNContinuousPair ltContPair = (LPNContinuousPair) ltTranPair;
// if(newNonZeroRate.containsKey(ltContPair)){
// // The variable no longer is rate zero, so do nothing.
// continue;
// // If the value has had an assignment, use the new values instead.
// if(oldRateZero.containsKey(new LPNContAndRate(ltContPair))){
// // Create the new VariableRangePair to add.
// Variable v = _lpnList[ltContPair.get_lpnIndex()]
// .getContVar(ltContPair.get_ContinuousIndex());
// VariableRangePair vrp =
// new VariableRangePair(v, oldRateZero.get(new LPNContAndRate(ltContPair)));
// newZone._rateZeroContinuous.put(ltContPair, vrp);
// else{
// newZone._rateZeroContinuous.put(ltTranPair, _rateZeroContinuous.get(ltTranPair));
// Copy over the rate zero continuous variables.
// First copy over all the continuous variables that still have
// rate zero.
for(LPNContAndRate ltTranPair : _rateZeroContinuous.keySet()){
// Cast the index.
LPNContinuousPair ltContPair = (LPNContinuousPair) ltTranPair.get_lcPair();
if(!newAssignValues.get(ltContPair).is_newZero()){
// The variable no longer is rate zero, so do nothing.
continue;
}
// If the value has had an assignment, use the new values instead.
// if(oldRateZero.containsKey(new LPNContAndRate(ltContPair))){
if(newAssignValues.contains(ltContPair)){
// Create the new VariableRangePair to add.
Variable v = _lpnList[ltContPair.get_lpnIndex()]
.getContVar(ltContPair.get_ContinuousIndex());
// VariableRangePair vrp =
// new VariableRangePair(v,
// oldRateZero.get(new LPNContAndRate(ltContPair)));
VariableRangePair vrp =
new VariableRangePair(v,
newAssignValues.get(ltContPair).get_Value());
newZone._rateZeroContinuous.insert(
new LPNContAndRate(ltContPair, new IntervalPair(0,0)), vrp);
}
else{
newZone._rateZeroContinuous
.insert(ltTranPair, _rateZeroContinuous.get(ltTranPair));
}
}
// Next add the values that are newly set to rate zero.
// for(LPNContAndRate ltCar : newRateZero.keySet()){
// // Exract the variable.
// Variable v = _lpnList[ltCar.get_lcPair().get_lpnIndex()].
// getContVar(ltCar.get_lcPair().get_ContinuousIndex());
// // Create a VariableRangePair.
// VariableRangePair vrp = new VariableRangePair(v, newRateZero.get(ltCar.get_lcPair()));
// // Add the value to the map.
// newZone._rateZeroContinuous.put(ltCar.get_lcPair(), vrp);
// for(LPNContAndRate ltCar : newRateZero.keySet()){
// // Exract the variable.
// Variable v = _lpnList[ltCar.get_lcPair().get_lpnIndex()].
// getContVar(ltCar.get_lcPair().get_ContinuousIndex());
// // Create a VariableRangePair.
// VariableRangePair vrp = new VariableRangePair(v, newRateZero.get(ltCar.get_lcPair()));
// // Add the value to the map.
// newZone._rateZeroContinuous.put(ltCar.get_lcPair(), vrp);
// We still need to add in the rate zero continuous variables whose rate remains zero
// since their range might have changed. We could check if the range has changed, but
		// it's just as easy (or easier) to simply add it anyway.
		// Add the indices for the non-zero rate continuous variables to the
		// _indexToTimerPair array.
// Start with the values already in the old array.
// int index = 1; // Index for the next continuous index object.
// for(int i=1; this._indexToTimerPair[i] instanceof LPNContinuousPair; i++){
// // Check that the value should not be removed.
// LPNContAndRate lcar = new LPNContAndRate((LPNContinuousPair) _indexToTimerPair[i]);
// if(newRateZero.containsKey(lcar)){
// continue;
//// else if (oldNonZero.containsKey(lcar)){
//// continue;
// else if (oldRateZero.containsKey(lcar)){
// continue;
// else{
// newZone._indexToTimerPair[index++] = this._indexToTimerPair[i].clone();
// Change to the new references for the oldNonZero. This change to the
// new current rate.
// for(LPNContAndRate lcar : oldNonZero.keySet()){
// int oldIndex = Arrays.binarySearch(_indexToTimerPair, lcar.get_lcPair());
// _indexToTimerPair[oldIndex] = lcar.get_lcPair();
// Add in the indecies for the new non-zero into the old array.
// for(LPNContinuousPair ltCont : newNonZeroRate.keySet()){
// for(LPNContAndRate ltCar : newNonZeroRate.keySet()){
//// newZone._indexToTimerPair[index++] = ltCont;
// newZone._indexToTimerPair[index++] = ltCar.get_lcPair();
// Arrays.sort(newZone._indexToTimerPair);
// Copy over the new transitions.
// for(Transition t : enabledTran){
//// newZone._indexToTimerPair[index++] = ;
// int lpnIndex = t.getLpn().getLpnIndex();
// int tranIndex = t.getIndex();
// newZone._indexToTimerPair[index++] =
// new LPNTransitionPair (lpnIndex, tranIndex);
Arrays.sort(newZone._indexToTimerPair);
}
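	/**
	 * Handles the moving of the continuous variables in and out of
	 * _rateZeroContinuous and builds the _indexToTimerPair array for the zone
	 * being constructed. This version is used by fireTransitionbydbmIndexNew.
	 * @param newZone
	 * 		The Zone being constructed.
	 * @param enabledTran
	 * 		The list of enabled transitions.
	 * @param newAssignValues
	 * 		The records of rate and value assignments made by the firing.
	 */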
private void copyRatesNew(Zone newZone, LpnTranList enabledTran,
ContinuousRecordSet newAssignValues){
// Create new rate zero member variable.
newZone._rateZeroContinuous = new DualHashMap<LPNContAndRate,
VariableRangePair>();
// Create new _indexToTimerPair.
// First get the total number of non-zero rate continuous variables that
// are present in the old zone.
int totalContinuous = 0;
for(int i=0; i<_lpnList.length; i++){
totalContinuous += _lpnList[i].getTotalNumberOfContVars();
}
int numberNonZero = totalContinuous - _rateZeroContinuous.size();
// The size is given by
// total number of transitions
// + number of non-zero rate continuous variables previously in the zone
// + number of zero rate continuous variables that now have non-zero
// - number of non-zero rate continuous variables that are now zero
// + 1 for the zero timer.
// int newSize = enabledTran.size()
// + numberNonZero + newAssignValues.get(NEW_NON_ZERO).size()
// - newAssignValues.get(NEW_ZERO).size() + 1;
// TODO: Create an object that stores the records along with this information.
int newNonZero = 0, newZero = 0;
for(UpdateContinuous record : newAssignValues.keySet()){
if(record.newlyNonZero()){
newNonZero++;
}
if(record.newlyZero()){
newZero++;
}
}
int newSize = enabledTran.size() + numberNonZero + newNonZero - newZero + 1;
// Create the timer array.
newZone._indexToTimerPair = new LPNTransitionPair[newSize];
// Add in the zero timer.
newZone._indexToTimerPair[0] = LPNTransitionPair.ZERO_TIMER_PAIR;
int indexTimerCount = 1;
// Sort the previous rate zero continuous variables into rate zero or non-zero.
for(LPNContAndRate ltTranPair : _rateZeroContinuous.keySet()){
// Cast the index.
LPNContinuousPair ltContPair = (LPNContinuousPair) ltTranPair.get_lcPair();
// Check if the variable is a newly assigned value.
UpdateContinuous assignedLtContPair = newAssignValues.get(ltContPair);
if(assignedLtContPair != null){
if(assignedLtContPair.newlyNonZero()){
					// Variable was zero and is now non-zero, so add it to the non-zero
// references.
newZone._indexToTimerPair[indexTimerCount++] =
assignedLtContPair.get_lcrPair().get_lcPair().clone();
}
else{
// Variable was zero and is still zero, but an assignment has been
// made. Simply add in the new assigned value.
VariableRangePair vrp = this._rateZeroContinuous.get(ltTranPair);
newZone._rateZeroContinuous.insert(assignedLtContPair.get_lcrPair(),
new VariableRangePair(vrp.get_variable(),
assignedLtContPair.get_Value()));
}
}
else{
newZone._rateZeroContinuous
.insert(ltTranPair, _rateZeroContinuous.get(ltTranPair));
}
}
// Sort the previous non-zero variables into the rate zero and non-zero.
		for(int i=1; i < this._indexToTimerPair.length &&
				this._indexToTimerPair[i] instanceof LPNContinuousPair; i++){
LPNContinuousPair lcPair = (LPNContinuousPair) this._indexToTimerPair[i];
// Check if an assignment has been made.
UpdateContinuous updateRecord = newAssignValues.get(lcPair);
if(updateRecord != null){
if(updateRecord.is_newZero()){
// The continuous variable is now a rate zero variable.
LPNContinuousPair ltCar = updateRecord.get_lcrPair().get_lcPair();
Variable v = _lpnList[ltCar.get_lpnIndex()].
getContVar(ltCar.get_ContinuousIndex());
// Dewarp the upper and lower bounds.
IntervalPair values = updateRecord.get_Value();
int currentRate = getCurrentRate(ltCar);
values.set_LowerBound(
values.get_LowerBound() * currentRate);
values.set_UpperBound(
values.get_UpperBound() * currentRate);
// Create a VariableRangePair.
VariableRangePair vrp = new VariableRangePair(v,
values);
// Add the value to the map.
// newZone._rateZeroContinuous.put(ltCar, vrp);
newZone._rateZeroContinuous.insert(updateRecord.get_lcrPair(), vrp);
}
else{
				// This variable still has a non-zero rate, but replace it with
				// the value from newAssignValues since the rate may have changed.
newZone._indexToTimerPair[indexTimerCount++] =
updateRecord.get_lcrPair().get_lcPair();
}
}
else{
// The variable was non-zero and hasn't had an assignment.
newZone._indexToTimerPair[indexTimerCount++] =
this._indexToTimerPair[i].clone();
}
}
// Copy over the new transitions.
for(Transition t : enabledTran){
int lpnIndex = t.getLpn().getLpnIndex();
int tranIndex = t.getIndex();
newZone._indexToTimerPair[indexTimerCount++] =
new LPNTransitionPair (lpnIndex, tranIndex);
}
Arrays.sort(newZone._indexToTimerPair);
}
/**
* Advances time.
*/
private void advance()
{
for(int i=0; i<dbmSize(); i++)
{
_matrix[dbmIndexToMatrixIndex(0)][dbmIndexToMatrixIndex(i)] =
getUpperBoundbydbmIndex(i);
}
}
/**
* Advances time. (This method should replace advance().)
* @param localStates
*/
public void advance(State[] localStates){
for(LPNTransitionPair ltPair : _indexToTimerPair){
if(ltPair.equals(LPNTransitionPair.ZERO_TIMER_PAIR)){
continue;
}
// Get the new value.
int newValue = 0;
// if(ltPair.get_isTimer()){
if(!(ltPair instanceof LPNContinuousPair)){
// If the pair is a timer, then simply get the stored largest value.
int index = timerIndexToDBMIndex(ltPair);
newValue = getUpperBoundbydbmIndex(index);
}
else{
// If the pair is a continuous variable, then need to find the
// possible largest bound governed by the inequalities.
newValue = ContinuousUtilities.maxAdvance(this,ltPair, localStates);
}
// In either case (timer or continuous), set the upper bound portion
// of the DBM to the new value.
setDbmEntryByPair(LPNTransitionPair.ZERO_TIMER_PAIR, ltPair, newValue);
}
}
/**
	 * Copies in the new values needed to add a set of new timers. The values
	 * are copied into this zone.
	 * @param tempZone
	 * 		The zone to look up the current values of timers.
	 * @param newTimers
	 * 		A collection of the new timers.
	 * @param oldTimers
	 * 		A collection of the older timers.
	 * @param localStates
	 * 		The current local states.
*/
private void copyTransitions(Zone tempZone, Collection<LPNTransitionPair> newTimers,
Collection<LPNTransitionPair> oldTimers, State[] localStates){
// Copy the tempZone to the new zone.
for(int i=0; i<tempZone.dbmSize(); i++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[i]))
{
continue;
}
			// Get the new index for the timer.
int newIndexi = i==0 ? 0 :
Arrays.binarySearch(_indexToTimerPair, tempZone._indexToTimerPair[i]);
for(int j=0; j<tempZone.dbmSize(); j++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[j]))
{
continue;
}
int newIndexj = j==0 ? 0 :
Arrays.binarySearch(_indexToTimerPair, tempZone._indexToTimerPair[j]);
_matrix[dbmIndexToMatrixIndex(newIndexi)]
[dbmIndexToMatrixIndex(newIndexj)]
= tempZone.getDbmEntry(i, j);
}
}
// Copy the upper and lower bounds.
for(int i=1; i<tempZone.dbmSize(); i++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[i]))
{
continue;
}
setLowerBoundByLPNTransitionPair(tempZone._indexToTimerPair[i],
-1*tempZone.getLowerBoundbydbmIndex(i));
// The minus sign is because _matrix stores the negative of the lower bound.
setUpperBoundByLPNTransitionPair(tempZone._indexToTimerPair[i],
tempZone.getUpperBoundbydbmIndex(i));
}
// Copy in the new relations for the new timers.
for(LPNTransitionPair timerNew : newTimers)
{
for(LPNTransitionPair timerOld : oldTimers)
{
setDbmEntry(timerIndexToDBMIndex(timerNew),
timerIndexToDBMIndex(timerOld),
tempZone.getDbmEntry(0, tempZone.timerIndexToDBMIndex(timerOld)));
setDbmEntry(timerIndexToDBMIndex(timerOld),
timerIndexToDBMIndex(timerNew),
tempZone.getDbmEntry(tempZone.timerIndexToDBMIndex(timerOld), 0));
}
}
// Set the upper and lower bounds for the new timers.
for(LPNTransitionPair pair : newTimers){
// Get all the upper and lower bounds for the new timers.
// Get the name for the timer in the i-th column/row of DBM
//String tranName = indexToTran.get(i).getName();
String tranName = _lpnList[pair.get_lpnIndex()]
.getTransition(pair.get_transitionIndex()).getLabel();
ExprTree delay = _lpnList[pair.get_lpnIndex()].getDelayTree(tranName);
// Get the values of the variables for evaluating the ExprTree.
HashMap<String, String> varValues =
_lpnList[pair.get_lpnIndex()]
.getAllVarsWithValuesAsString(localStates[pair.get_lpnIndex()].getVariableVector());
// Set the upper and lower bound.
int upper, lower;
if(delay.getOp().equals("uniform"))
{
IntervalPair lowerRange = delay.getLeftChild()
.evaluateExprBound(varValues, null, null);
IntervalPair upperRange = delay.getRightChild()
.evaluateExprBound(varValues, null, null);
// The lower and upper bounds should evaluate to a single
// value. Yell if they don't.
if(!lowerRange.singleValue() || !upperRange.singleValue()){
throw new IllegalStateException("When evaulating the delay, " +
"the lower or the upper bound evaluated to a range " +
"instead of a single value.");
}
lower = lowerRange.get_LowerBound();
upper = upperRange.get_UpperBound();
}
else
{
IntervalPair range = delay.evaluateExprBound(varValues, this, null);
lower = range.get_LowerBound();
upper = range.get_UpperBound();
}
setLowerBoundByLPNTransitionPair(pair, lower);
setUpperBoundByLPNTransitionPair(pair, upper);
}
}
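	/*
	 * Illustrative note on the delay evaluation in copyTransitions above,
	 * using a hypothetical delay expression: a transition whose delay tree is
	 * uniform(2,5) takes the "uniform" branch, its left child evaluates to the
	 * single value 2 and its right child to 5, so lower = 2 and upper = 5.
	 * Any other delay expression is evaluated directly and the bounds are read
	 * from the resulting IntervalPair.
	 */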
/**
	 * This method sets all the rates to their lower bounds.
	 * It will not work quite right for continuous variables
	 * whose rate ranges include zero.
*/
private void setAllToLowerBoundRate(){
// Loop through the continuous variables.
for(int i=1; i<_indexToTimerPair.length &&
_indexToTimerPair[i] instanceof LPNContinuousPair; i++){
LPNContinuousPair ltContPair = (LPNContinuousPair) _indexToTimerPair[i];
			// For this, recall that for a continuous variable the lower bound
			// rate is stored in the zero column of the matrix.
// setCurrentRate(ltContPair,
// -1*getDbmEntry(0,
// dbmIndexToMatrixIndex(i)));
setCurrentRate(ltContPair,
-1*_matrix[dbmIndexToMatrixIndex(i)][0]);
}
}
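	/*
	 * Note on the encoding assumed by setAllToLowerBoundRate above: for a
	 * continuous variable at DBM index i, _matrix[dbmIndexToMatrixIndex(i)][0]
	 * holds the negative of its lower rate bound, so -1 times that entry
	 * recovers the lower bound. For example, a stored entry of -3 sets the
	 * current rate to 3.
	 */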
/**
	 * Resets the rates of all continuous variables to be their
* lower bounds.
*/
public Zone resetRates(){
// Create the new zone.
Zone newZone = new Zone();
// Copy the rate zero variables.
newZone._rateZeroContinuous = this._rateZeroContinuous.clone();
// Copy the LPNs over.
newZone._lpnList = new LhpnFile[this._lpnList.length];
for(int i=0; i<this._lpnList.length; i++){
newZone._lpnList[i] = this._lpnList[i];
}
		// Loop through the variables and save off those
		// that are rate zero. Accumulate an array that
		// indicates which are zero for faster
		// copying, and count how many rate-zero variables there are.
boolean[] rateZero = new boolean[this._indexToTimerPair.length]; // Is rate zero.
int zeroCount = 0;
for(int i=1; i<this._indexToTimerPair.length &&
this._indexToTimerPair[i] instanceof LPNContinuousPair; i++){
int lowerBound = -1*getLowerBoundbydbmIndex(i);
int upperBound = getUpperBoundbydbmIndex(i);
if(lowerBound <= 0 && upperBound >= 0){
				// Zero is in the rate range, so zero will be
				// the new current rate.
rateZero[i] = true;
LPNContinuousPair lcPair =
(LPNContinuousPair) this._indexToTimerPair[i].clone();
lcPair.setCurrentRate(0);
// Save as a rate zero continuous variable.
LPNContAndRate newRateZero =
new LPNContAndRate(lcPair,
this.getRateBounds(lcPair));
VariableRangePair vcp =
new VariableRangePair(
this._lpnList[lcPair.get_lpnIndex()]
.getContVar(lcPair.get_ContinuousIndex()),
this.getContinuousBounds(lcPair));
newZone._rateZeroContinuous.insert(newRateZero, vcp);
// Update continuous variable counter.
zeroCount++;
}
}
// Save over the indexToTimer pairs.
newZone._indexToTimerPair =
new LPNTransitionPair[this._indexToTimerPair.length-zeroCount];
for(int i=0, j=0; i<newZone._indexToTimerPair.length; i++,j++){
// Ignore rate zero variables.
			// Use a while loop so that consecutive rate zero variables are all skipped.
			while(rateZero[j]){
				j++;
			}
newZone._indexToTimerPair[i] = this._indexToTimerPair[j].clone();
// If this is a continuous variable, set the rate to the lower bound.
if(newZone._indexToTimerPair[i] instanceof LPNContinuousPair){
((LPNContinuousPair) newZone._indexToTimerPair[i])
.setCurrentRate(this.getSmallestRate(j));
}
}
// Calculate the size of the matrix and create it.
newZone._matrix = new int[newZone.matrixSize()][newZone.matrixSize()];
// Copy over the old matrix for all variables except
// the rate zero variables.
for(int i=0, ioffset=0; i<newZone.matrixSize(); i++){
if(i>=1 && rateZero[i-1]){
ioffset++;
}
for(int j=0, joffset=0; j<newZone.matrixSize(); j++){
if(j>=1 && rateZero[j-1]){
joffset++;
}
newZone._matrix[i][j] = this._matrix[i+ioffset][j+joffset];
}
}
// Warp
newZone.dbmWarp(this);
newZone.recononicalize();
return newZone;
}
/**
	 * Finds the maximum amount that time can advance.
	 * @return
	 * 		The maximum amount that time can advance before a timer expires or an inequality changes sign.
*/
// private int maxAdvance(LPNTransitionPair contVar, State[] localStates){
// /*
// * Several comments in this function may look like C code. That's because,
// * well it is C code from atacs/src/lhpnrsg.c. In particular the
// * lhpnCheckPreds method.
// */
// // Get the continuous variable in question.
// int lpnIndex = contVar.get_lpnIndex();
// int varIndex = contVar.get_transitionIndex();
// Variable variable = _lpnList[lpnIndex].getContVar(varIndex);
//// int lhpnCheckPreds(int p,ineqList &ineqL,lhpnStateADT s,ruleADT **rules,
//// int nevents,eventADT *events)
////#ifdef __LHPN_TRACE__
////printf("lhpnCheckPreds:begin()\n");
////#endif
////int min = INFIN;
////int newMin = INFIN;
// int min = INFINITY;
// int newMin = INFINITY;
////int zoneP = getIndexZ(s->z,-2,p);
////for(unsigned i=0;i<ineqL.size();i++) {
//// if(ineqL[i]->type > 4) {
//// continue;
////#ifdef __LHPN_PRED_DEBUG__
//// printf("Zone to check...\n");
//// printZ(s->z,events,nevents,s->r);
//// printf("Checking ...");
//// printI(ineqL[i],events);
//// printf("\n");
////#endif
//// if(ineqL[i]->place == p) {
// // Get all the inequalities that reference the variable of interest.
// ArrayList<InequalityVariable> inequalities = variable.getInequalities();
// for(InequalityVariable ineq : inequalities){
//// ineq_update(ineqL[i],s,nevents);
// // Update the inequality variable.
// int ineqValue = ineq.evaluate(localStates[varIndex], this);
//// if(ineqL[i]->type <= 1) {
//// /* Working on a > or >= ineq */
// if(ineq.get_op().equals(">") || ineq.get_op().equals(">=")){
// // Working on a > or >= ineq
//// if(s->r->bound[p-nevents].current > 0) {
// // If the rate is positive.
// if(getCurrentRate(contVar) > 0){
//// if(s->m->state[ineqL[i]->signal]=='1') {
// if(ineqValue != 0){
//// if(s->z->matrix[zoneP][0] <
//// chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 1a\n");
////#endif
////#ifdef __LHPN_WARN__
//// warn("checkPreds: Impossible case 1.\n");
////#endif
//// newMin = s->z->matrix[zoneP][0];
// if(getDbmEntry(0, contVar.get_transitionIndex())
// < chkDiv(ineq.getConstant(), getCurrentRate(contVar), false)){
// // CP: case 1a.
// newMin = getDbmEntry(0, contVar.get_transitionIndex());
// System.err.println("maxAdvance: Impossible case 1.");
//// else if((-1)*s->z->matrix[0][zoneP] >
//// chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 2a\n");
////#endif
//// newMin = chkDiv(events[p]->urange,
//// s->r->bound[p-nevents].current,'F');
// else if ((-1)*getDbmEntry(contVar.get_transitionIndex(),0)
// > chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP : case 2a
// newMin = INFINITY;
//// else {
//// /* straddle case */
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 3a\n");
////#endif
//// newMin = chkDiv(events[p]->urange,
//// s->r->bound[p-nevents].current,'F');
// else{
// // Straddle case
// // CP : case 3a
// newMin = INFINITY;
// else{
//// else {
//// if(s->z->matrix[zoneP][0] <
//// chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 4a -- min: %d\n",chkDiv(ineqL[i]->constant,s->r->bound[p-nevents].current,'F'));
////#endif
//// newMin = chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F');
// if(getDbmEntry(contVar.get_transitionIndex(), 0)
// < chkDiv(ineq.getConstant(), getCurrentRate(contVar), false)){
// // CP: case 4a -- min
// newMin = chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false);
//// else if((-1)*s->z->matrix[0][zoneP] >
//// chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 5a\n");
////#endif
////#ifdef __LHPN_WARN__
//// warn("checkPreds: Impossible case 3.\n");
////#endif
//// newMin = s->z->matrix[zoneP][0];
// else if((-1)*getDbmEntry(contVar.get_transitionIndex(),0)
// < chkDiv(ineq.getConstant(), getCurrentRate(contVar), false)){
// // Impossible case 3.
// newMin = getDbmEntry(0, contVar.get_transitionIndex());
// System.err.print("maxAdvance : Impossible case 3.");
//// else {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 6a -- min: %d\n",s->z->matrix[zoneP][0]);
////#endif
//// /* straddle case */
//// newMin = s->z->matrix[zoneP][0];
// else{
// // CP : cas 6a
// // straddle case
// newMin = getDbmEntry(0,contVar.get_transitionIndex());
//// else {
//// /* warp <= 0 */
// else{
// // warp <= 0.
//// if(s->m->state[ineqL[i]->signal]=='1') {
// if( ineqValue != 1){
//// if(s->z->matrix[0][zoneP] <
//// (-1)*chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 7a\n");
////#endif
////#ifdef __LHPN_WARN__
//// warn("checkPreds: Impossible case 2.\n");
////#endif
//// newMin = s->z->matrix[zoneP][0];
// if(getDbmEntry(contVar.get_transitionIndex(),0)
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 7a.
// newMin = getDbmEntry(0,contVar.get_transitionIndex());
// System.err.println("Warining: impossible case 2a found.");
//// else if((-1)*s->z->matrix[zoneP][0] >
//// (-1)*chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 8a\n");
////#endif
//// newMin = chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F');
// else if((-1)*getDbmEntry(0, contVar.get_transitionIndex())
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // Impossible case 8a.
// newMin = chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false);
//// else {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 9a\n");
////#endif
//// /* straddle case */
//// newMin = s->z->matrix[zoneP][0];
// else{
// // straddle case
// newMin = getDbmEntry(0, contVar.get_transitionIndex());
//// else {
// else{
//// if(s->z->matrix[0][zoneP] <
//// (-1)*chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 10a\n");
////#endif
//// newMin = chkDiv(events[p]->lrange,
//// s->r->bound[p-nevents].current,'F');
// if(getDbmEntry(contVar.get_transitionIndex(),0)
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 10a.
// newMin = INFINITY;
//// else if((-1)*s->z->matrix[zoneP][0] >
//// (-1)*chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 11a\n");
//// printf("z=%d c=%d b=%d\n",
//// s->z->matrix[zoneP][0],
//// ineqL[i]->constant,
//// s->r->bound[p-nevents].current);
////#endif
////#ifdef __LHPN_WARN__
//// warn("checkPreds: Impossible case 4.\n");
////#endif
//// newMin = s->z->matrix[zoneP][0];
// else if((-1)*getDbmEntry(0, contVar.get_transitionIndex())
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 7a.
// newMin = getDbmEntry(0,contVar.get_transitionIndex());
// System.err.println("maxAdvance : Impossible case 4.");
//// else {
//// /* straddle case */
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 12a\n");
////#endif
//// newMin = chkDiv(events[p]->lrange,
//// s->r->bound[p-nevents].current,'F');
// else{
// // straddle case
// newMin = INFINITY;
//// else {
//// /* Working on a < or <= ineq */
// else{
// // Working on a < or <= ineq
//// if(s->r->bound[p-nevents].current > 0) {
// if(getUpperBoundForRate(contVar) > 0){
//// if(s->m->state[ineqL[i]->signal]=='1') {
// if(ineqValue != 0){
//// if(s->z->matrix[zoneP][0] <
//// chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 1b -- min: %d\n",chkDiv(ineqL[i]->constant,s->r->bound[p-nevents].current,'F'));
////#endif
//// newMin = chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F');
// if(getDbmEntry(0, contVar.get_transitionIndex())
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 1b -- min.
// newMin = chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false);
//// else if((-1)*s->z->matrix[0][zoneP] >
//// chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 2b\n");
////#endif
////#ifdef __LHPN_WARN__
//// warn("checkPreds: Impossible case 5.\n");
////#endif
//// newMin = chkDiv(events[p]->urange,
//// s->r->bound[p-nevents].current,'F');
// if((-1)*getDbmEntry(contVar.get_transitionIndex(), 0)
// < chkDiv(ineq.getConstant(), getCurrentRate(contVar),false)){
// // CP: case 2b.
// newMin = INFINITY;
// System.err.println("Warning : Impossible case 5.");
//// else {
//// /* straddle case */
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 3b -- min: %d\n",s->z->matrix[zoneP][0]);
////#endif
//// newMin = s->z->matrix[zoneP][0];
// else{
// //straddle case
// newMin = getDbmEntry(0,contVar.get_transitionIndex());
//// else {
// else{
//// if(s->z->matrix[zoneP][0] <
//// chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 4b\n");
////#endif
////#ifdef __LHPN_WARN__
//// warn("checkPreds: Impossible case 7.\n");
////#endif
//// newMin = s->z->matrix[zoneP][0];
// if(getDbmEntry(0, contVar.get_transitionIndex())
// < chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 4b.
// newMin = getDbmEntry(0, contVar.get_transitionIndex());
// System.err.println("maxAdvance : Impossible case 7.");
//// else if((-1)*s->z->matrix[0][zoneP] >
//// chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 5b\n");
////#endif
//// newMin = chkDiv(events[p]->urange,
//// s->r->bound[p-nevents].current,'F');
// else if((-1)*getDbmEntry(contVar.get_transitionIndex(), 0)
// < chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 5b.
// newMin = INFINITY;
//// else {
//// /* straddle case */
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 6b\n");
////#endif
//// newMin = chkDiv(events[p]->urange,
//// s->r->bound[p-nevents].current,'F');
// else{
// // straddle case
// // CP : case 6b
// newMin = INFINITY;
//// else {
//// /* warp <= 0 */
// else {
// // warp <=0
//// if(s->m->state[ineqL[i]->signal]=='1') {
// if(ineqValue != 0){
//// if(s->z->matrix[0][zoneP] <
//// (-1)*chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 7b\n");
////#endif
//// newMin = chkDiv(events[p]->lrange,
//// s->r->bound[p-nevents].current,'F');
// if(getDbmEntry(contVar.get_transitionIndex(), 0)
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 7b.
// newMin = INFINITY;
//// else if((-1)*s->z->matrix[zoneP][0] >
//// (-1)*chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 8b\n");
////#endif
////#ifdef __LHPN_WARN__
//// warn("checkPreds: Impossible case 8.\n");
////#endif
//// newMin = s->z->matrix[zoneP][0];
// else if((-1)*getDbmEntry(0, contVar.get_transitionIndex())
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 8b.
// newMin = getDbmEntry(0, contVar.get_transitionIndex());
// System.err.println("Warning : Impossible case 8.");
//// else {
//// /* straddle case */
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 9b\n");
////#endif
//// newMin = chkDiv(events[p]->lrange,
//// s->r->bound[p-nevents].current,'F');
// else {
// // straddle case
// // CP: case 9b.
// newMin = INFINITY;
//// else {
// else {
//// if(s->z->matrix[0][zoneP] <
//// chkDiv((-1)*ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 10b\n");
//// printf("zone: %d const: %d warp: %d chkDiv: %d\n",s->z->matrix[0][zoneP],ineqL[i]->constant,s->r->bound[p-nevents].current,chkDiv((-1)*ineqL[i]->constant,s->r->bound[p-nevents].current,'F'));
////#endif
////#ifdef __LHPN_WARN__
//// warn("checkPreds: Impossible case 6.\n");
////#endif
//// newMin = s->z->matrix[zoneP][0];
// if(getDbmEntry(contVar.get_transitionIndex(),0)
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar), false)){
// // CP: case 10b.
// newMin = getDbmEntry(0,contVar.get_transitionIndex());
// System.err.println("Warning : Impossible case 6");
//// else if((-1)*s->z->matrix[zoneP][0] >
//// (-1)*chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 11b\n");
////#endif
//// newMin = chkDiv(ineqL[i]->constant,
//// s->r->bound[p-nevents].current,'F');
// else if((-1)*getDbmEntry(0,contVar.get_transitionIndex())
// < (-1)*chkDiv(ineq.getConstant(),
// getCurrentRate(contVar),false)){
// // CP: case 7b.
// newMin = chkDiv(ineq.getConstant(), getCurrentRate(contVar),false);
//// else {
//// /* straddle case */
////#ifdef __LHPN_PRED_DEBUG__
//// printf("CP:case 12b\n");
////#endif
//// newMin = s->z->matrix[zoneP][0];
// else {
// // straddle case
// // CP : case 12b
// newMin = getDbmEntry(0, contVar.get_transitionIndex());
//// if(newMin < min) {
//// min = newMin;
// // Check if the value can be lowered.
// if(newMin < min){
// min = newMin;
////#ifdef __LHPN_PRED_DEBUG__
////printf("Min leaving checkPreds for %s: %d\n",events[p]->event,min);
////#endif
////return min;
// return min;
/* (non-Javadoc)
* @see java.lang.Object#clone()
*/
public Zone clone()
{
// TODO: Check if finished.
Zone clonedZone = new Zone();
clonedZone._matrix = new int[this.matrixSize()][this.matrixSize()];
for(int i=0; i<this.matrixSize(); i++)
{
for(int j=0; j<this.matrixSize(); j++)
{
clonedZone._matrix[i][j] = this._matrix[i][j];
}
}
// clonedZone._indexToTimerPair = Arrays.copyOf(_indexToTimerPair, _indexToTimerPair.length);
clonedZone._indexToTimerPair = new LPNTransitionPair[this._indexToTimerPair.length];
for(int i=0; i<_indexToTimerPair.length; i++){
clonedZone._indexToTimerPair[i] = this._indexToTimerPair[i].clone();
}
clonedZone._hashCode = this._hashCode;
clonedZone._lpnList = Arrays.copyOf(this._lpnList, this._lpnList.length);
clonedZone._rateZeroContinuous = this._rateZeroContinuous.clone();
return clonedZone;
}
/**
* Restricts the lower bound of a timer.
*
* @param timer
	 * 		The timer whose lower bound is tightened.
*/
private void restrictTimer(int timer)
{
//int dbmIndex = Arrays.binarySearch(_indexToTimer, timer);
_matrix[dbmIndexToMatrixIndex(timer)][dbmIndexToMatrixIndex(0)]
= getLowerBoundbydbmIndex(timer);
}
/**
	 * Restricts the lower bound of a continuous variable. Also checks whether
	 * the upper bound needs to be raised to be at least as large, and raises
	 * it if needed. This method is usually used as a result of an event firing.
	 * @param ltContPair
	 * 		The index of the continuous variable to restrict.
	 * @param constant
	 * 		The constant value of the inequality event that is being used to update
	 * 		the variable indexed by ltContPair.
	 * @return
	 * 		True if the upper bound had to be advanced, otherwise false.
*/
private boolean restrictContinuous(LPNContinuousPair ltContPair, int constant){
// It will be quicker to get the DBM index for the ltContPair one time.
int variableIndex = timerIndexToDBMIndex(ltContPair);
int zeroIndex = timerIndexToDBMIndex(LPNTransitionPair.ZERO_TIMER_PAIR);
		// Set the lower bound of the variable (which is the DBM[variable][0] entry).
		// Note : the zone actually stores the negative of the lower bound,
		// hence the -1 on the constant.
setDbmEntry(variableIndex, zeroIndex, ContinuousUtilities.chkDiv(-1*constant, ltContPair.getCurrentRate(), true));
// Check if the upper bound needs to be advanced and advance it if necessary.
if(getDbmEntry(zeroIndex, variableIndex) < ContinuousUtilities.chkDiv(constant, ltContPair.getCurrentRate(), true)){
// If the upper bound in the zones is less than the new restricting value, we
// must advance it for the zone to remain consistent.
setDbmEntry(zeroIndex, variableIndex, ContinuousUtilities.chkDiv(constant, ltContPair.getCurrentRate(), true));
return true;
}
return false;
}
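	/*
	 * A minimal illustration of restrictContinuous above, with hypothetical
	 * values: for constant = 6 and a current rate of 2, the lower bound entry
	 * becomes chkDiv(-6, 2, true) = -3; if the stored upper bound entry is
	 * below chkDiv(6, 2, true) = 3, it is raised to 3 and the method returns
	 * true to signal that an adjustment was made.
	 */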
/**
* Restricts the continuous variables in the zone according to the inequalities in a set of events.
* @param eventSet
	 * 		A set of inequality events. Does nothing if the event set does not contain inequalities.
*/
private void restrictContinuous(EventSet eventSet){
// Check that the eventSet is a set of Inequality events.
if(!eventSet.isInequalities()){
// If the eventSet is not a set of inequalities, do nothing.
return;
}
HashSet<LPNContinuousPair> adjustedColumns = new HashSet<LPNContinuousPair>();
boolean needsAdjusting = false;
// Restrict the variables according to each of the inequalities in the eventSet.
for(Event e : eventSet){
// Get the inequality.
InequalityVariable iv = e.getInequalityVariable();
// Extract the variable. I will assume the inequality only depends on a single
// variable.
Variable x = iv.getContVariables().get(0);
// Extract the index.
int lpnIndex = iv.get_lpn().getLpnIndex();
// Extract the variable index.
// DualHashMap<String, Integer> variableIndexMap = _lpnList[lpnIndex].getVarIndexMap();
DualHashMap<String, Integer> variableIndexMap = _lpnList[lpnIndex].getContinuousIndexMap();
int variableIndex = variableIndexMap.getValue(x.getName());
// Package it up for referencing.
// LPNContinuousPair ltContPair = new LPNContinuousPair(lpnIndex, variableIndex, 0);
LPNContinuousPair ltContPair = new LPNContinuousPair(lpnIndex, variableIndex);
			// Need the current rate for the variable, so grab the stored LPNContinuousPair.
int zoneIndex = Arrays.binarySearch(_indexToTimerPair, ltContPair);
if(zoneIndex > 0){
ltContPair = (LPNContinuousPair) _indexToTimerPair[zoneIndex];
}
//setDbmEntry(zoneIndex, 0, -ContinuousUtilities.chkDiv(iv.getConstant(), ltContPair.getCurrentRate(), true));
			// Perform the restricting. Track separately whether this particular
			// variable was adjusted so that only adjusted variables are recorded.
			boolean adjusted = restrictContinuous(ltContPair, iv.getConstant());
			needsAdjusting = needsAdjusting | adjusted;
			if(adjusted){
				adjustedColumns.add(ltContPair);
			}
}
		// If one of the continuous variables has been moved forward, the other columns
		// need to be adjusted to keep a consistent zone.
if(needsAdjusting){
// At least one of the continuous variables has been moved forward,
			// so we need to adjust the bounds to keep a consistent zone.
for(int i=1; i<_indexToTimerPair.length; i++){
LPNTransitionPair ltpair = _indexToTimerPair[i];
if(adjustedColumns.contains(ltpair)){
					// This continuous variable already had its upper bound
// adjusted.
continue;
}
else{
// Add one to the upper bounds.
setDbmEntry(0, i, getDbmEntry(0, i)+1);
}
}
}
}
/**
	 * Returns a zone that is the result of restricting this zone according to a list of firing inequality events.
	 * @param eventSet
	 * 		The list of inequalities that are firing.
	 * @param localStates
	 * 		The current local states.
* @return
* The new zone that is the result of restricting this zone according to the firing of the inequalities
* in the eventSet.
*/
public Zone getContinuousRestrictedZone(EventSet eventSet, State[] localStates){
// Make a new copy of the zone.
Zone z = this.clone();
if(eventSet == null){
return z;
}
z.restrictContinuous(eventSet);
// z.advance(localStates);
// z.recononicalize();
return z;
}
/**
	 * The list of enabled transitions.
	 * @return
	 * 		The list of all transitions whose timers have reached their lower bounds.
*/
public List<Transition> getEnabledTransitions()
{
ArrayList<Transition> enabledTransitions = new ArrayList<Transition>();
		// Check if the timer exceeds its lower bound starting with the first nonzero
// timer.
for(int i=1; i<_indexToTimerPair.length; i++)
{
if(getDbmEntry(0, i) >= -1 * getLowerBoundbydbmIndex(i))
{
enabledTransitions.add(_lpnList[_indexToTimerPair[i].get_lpnIndex()]
.getTransition(_indexToTimerPair[i].get_transitionIndex()));
}
}
return enabledTransitions;
}
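	/*
	 * Example of the enabling test above: since the matrix stores the negative
	 * of each lower bound, a timer with lower bound 4 has
	 * getLowerBoundbydbmIndex(i) == -4, so its transition is reported enabled
	 * once getDbmEntry(0, i) reaches 4.
	 */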
/**
* Gives the list of enabled transitions associated with a particular LPN.
* @param LpnIndex
* The Index of the LPN the Transitions are a part of.
* @return
* A List of the Transitions that are enabled in the LPN given by the index.
*/
public List<Transition> getEnabledTransitions(int LpnIndex){
ArrayList<Transition> enabledTransitions = new ArrayList<Transition>();
		// Check if the timer exceeds its lower bound starting with the first nonzero
// timer.
for(int i=1; i<_indexToTimerPair.length; i++)
{
if(getDbmEntry(0, i) >= -1 * getLowerBoundbydbmIndex(i))
{
LPNTransitionPair ltPair = _indexToTimerPair[i];
if( ltPair.get_lpnIndex() == LpnIndex){
enabledTransitions.add(_lpnList[ltPair.get_lpnIndex()]
.getTransition(ltPair.get_transitionIndex()));
}
}
}
return enabledTransitions;
}
/**
* Find the next possible events.
*
* @param LpnIndex
* The index of the LPN that is of interest.
* @param localState
* The state associated with the LPN indexed by LpnIndex.
* @return
* LpnTranList is populated with a list of
* EventSets pertaining to the LPN with index LpnIndex. An EventSet can
* either contain a transition to
	 * 		fire or a set of inequalities to change sign.
*/
public LpnTranList getPossibleEvents(int LpnIndex, State localState){
LpnTranList result = new LpnTranList();
// Look through the timers and continuous variables. For the timers
// determine if they are ready to fire. For the continuous variables,
// look up the associated inequalities and see if any of them are ready
// to fire.
// for(LPNTransitionPair ltPair : _indexToTimerPair){
// We do not need to consider the zero timer, so start the
// for loop at i=1 and not i=0.
for(int i=1; i<_indexToTimerPair.length; i++){
LPNTransitionPair ltPair = _indexToTimerPair[i];
// The enabled events are grouped with the LPN that they affect. So if
// this pair does not belong to the current LPN under consideration, skip
// processing it.
if(ltPair.get_lpnIndex() != LpnIndex){
continue;
}
// If the index refers to a timer (and not a continuous variable) and has exceeded its lower bound,
// then add the transition.
if(!(ltPair instanceof LPNContinuousPair)){
//result.add(_lpnList[ltPair.get_lpnIndex()].getTransition(ltPair.get_transitionIndex()));
// The index refers to a timer. Now check if time has advanced
// far enough for the transition to fire.
if(getDbmEntry(0, i) >= -1 * getLowerBoundbydbmIndex(i)){
Event e = new Event(_lpnList[ltPair.get_lpnIndex()].getTransition(ltPair.get_transitionIndex()));
result = addSetItem(result, e, localState);
}
}
else{
// The index refers to a continuous variable.
// First check for a rate change event.
LPNContinuousPair ltContPair =
((LPNContinuousPair) ltPair).clone();
IntervalPair ratePair = getRateBounds(ltContPair);
// if(!ratePair.singleValue()
// && (ltContPair.getCurrentRate() == ratePair.getSmallestRate())){
// // The rate represents a range of rates and no rate change
// // event has occured.
// if(ratePair.containsZero()){
// // The rate contians zero, so we need to add two rate
// // events: one for the lower bound and one for the upper
// // bound.
// LPNContinuousPair otherltContPair = ltContPair.clone();
// ltContPair.setCurrentRate(ratePair.get_LowerBound());
// otherltContPair.setCurrentRate(ratePair.get_UpperBound());
// // Create the events.
// Event lowerRateChange = new Event(ltContPair);
// Event upperRateChange = new Event(otherltContPair);
// // Add them to the result set.
// result = addSetItem(result, lowerRateChange, localState);
// result = addSetItem(result, upperRateChange, localState);
// else{
// ltContPair.setCurrentRate(ratePair.getLargestRate());
// result = addSetItem(result,
// new Event(ltContPair), localState);
result = createRateEvents(ltContPair, ratePair, result, localState);
// Check all the inequalities for inclusion.
Variable contVar = _lpnList[ltPair.get_lpnIndex()].getContVar(ltPair.get_transitionIndex());
if(contVar.getInequalities() != null){
for(InequalityVariable iv : contVar.getInequalities()){
// Check if the inequality can change.
if(ContinuousUtilities.inequalityCanChange(this, iv, localState)){
result = addSetItem(result, new Event(iv), localState);
}
}
}
}
}
// Check the rate zero variables for possible rate change events.
for(LPNContAndRate lcrPair : _rateZeroContinuous.keySet()){
// Get the reference object:
LPNContinuousPair ltContPair = lcrPair.get_lcPair();
// Extract the range of rates.
IntervalPair ratePair = lcrPair.get_rateInterval();
result = createRateEvents(ltContPair, ratePair, result, localState);
}
return result;
}
private LpnTranList createRateEvents(LPNContinuousPair ltContPair, IntervalPair ratePair,
LpnTranList result, State localState){
if(!ratePair.singleValue()
&& (ltContPair.getCurrentRate() == ratePair.getSmallestRate())){
// The rate represents a range of rates and no rate change
			// event has occurred.
// if(ratePair.containsZero()){
if(ratePair.strictlyContainsZero()){
				// The rate range contains zero, so we need to add two rate
// events: one for the lower bound and one for the upper
// bound.
LPNContinuousPair otherltContPair = ltContPair.clone();
ltContPair.setCurrentRate(ratePair.get_LowerBound());
otherltContPair.setCurrentRate(ratePair.get_UpperBound());
// Create the events.
Event lowerRateChange = new Event(ltContPair);
Event upperRateChange = new Event(otherltContPair);
// Add them to the result set.
result = addSetItem(result, lowerRateChange, localState);
result = addSetItem(result, upperRateChange, localState);
}
else{
ltContPair.setCurrentRate(ratePair.getLargestRate());
result = addSetItem(result,
new Event(ltContPair), localState);
}
}
return result;
}
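	/*
	 * Behavior sketch for createRateEvents above: if a variable's rate
	 * interval is a proper range, its current rate is still the smallest rate,
	 * and the interval strictly contains zero (say [-2, 3]), two rate-change
	 * events are produced, one at rate -2 and one at rate 3. If the range does
	 * not strictly contain zero, a single event at the largest rate is
	 * produced. Otherwise no event is added.
	 */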
/**
	 * Adds or removes items as appropriate to update the current
	 * list of possible events. Note that the type LpnTranList extends
	 * LinkedList<Transition>. The type EventSet extends Transition
	 * specifically so that objects of EventSet type can be placed in
	 * this list.
	 * @param E
	 * 		The list of possible events.
	 * @param e
	 * 		The event under consideration.
	 * @param s
	 * 		The current state.
	 * @return
	 * 		The updated list of event sets.
*/
public LpnTranList addSetItem(LpnTranList E, Event e, State s){
// void lhpnAddSetItem(eventSets &E,lhpnEventADT e,ineqList &ineqL,lhpnZoneADT z,
// lhpnRateADT r,eventADT *events,int nevents,
// lhpnStateADT cur_state)
//int rv1l,rv1u,rv2l,rv2u,iZ,jZ;
		// Note the LpnTranList plays the role of the eventSets.
int rv1l=0, rv1u=0, rv2l=0, rv2u=0, iZ, jZ;
//#ifdef __LHPN_TRACE__
//printf("lhpnAddSetItem:begin()\n");
//#endif
//#ifdef __LHPN_ADD_ACTION__
//printf("Event sets entering:\n");
//printEventSets(E,events,ineqL);
//printf("Examining event: ");
//printLhpnEvent(e,events,ineqL);
//#endif
//eventSet* eSet = new eventSet();
//eventSets* newE = new eventSets();
//bool done = false;
//bool possible = true;
//eventSets::iterator i;
// Create the new LpnTranlist for holding the events.
EventSet eSet = new EventSet();
LpnTranList newE = new LpnTranList();
boolean done = false;
boolean possible = true;
//if ((e->t == -1) && (e->ineq == -1)) {
if(e.isRate()){
//eSet->insert(e);
eSet.add(e);
//newE->push_back(*eSet);
// I believe that I should actually copy over the old list
// and then add the new event set.
newE = E.copy();
newE.addLast(eSet);
//E.clear();
//E = *newE;
			// The previous two commands act to pass the changes of E
			// back out of the function, so returning the new object
			// is sufficient.
//#ifdef __LHPN_ADD_ACTION__
//printf("Event sets leaving:\n");
//printEventSets(E,events,ineqL);
//#endif
//#ifdef __LHPN_TRACE__
//printf("lhpnAddSetItem:end()\n");
//#endif
//return;
return newE;
}
//if (e->t == -1) {
if(e.isInequality()){
//ineq_update(ineqL[e->ineq],cur_state,nevents);
			// Is it necessary, or even correct, to update the inequalities here?
			System.out.println("Note: the inequality is not being updated in addSetItem");
// In this case the Event e represents an inequality.
InequalityVariable ineq = e.getInequalityVariable();
//rv2l = chkDiv(-1 * ineqL[e->ineq]->constant,
// r->bound[ineqL[e->ineq]->place-nevents].current,'C');
//rv2u = chkDiv(ineqL[e->ineq]->constant,
// r->bound[ineqL[e->ineq]->place-nevents].current,'C');
//iZ = getIndexZ(z,-2,ineqL[e->ineq]->place);
// Need to extract the rate.
// To do this, I'll create the indexing object.
Variable v = ineq.getContVariables().get(0);
// Find the LPN.
int lpnIndex = ineq.get_lpn().getLpnIndex();
int varIndex = _lpnList[lpnIndex].
getContinuousIndexMap().getValue(v.getName());
// Package it all up.
// LPNTransitionPair ltPair =
// new LPNTransitionPair(lpnIndex, varIndex, false);
// Note : setting the rate is not necessary since
			// this is only being used as an index.
// LPNContinuousPair ltPair =
// new LPNContinuousPair(lpnIndex, varIndex, 0);
LPNContinuousPair ltPair = new LPNContinuousPair(lpnIndex, varIndex);
rv2l = ContinuousUtilities.chkDiv(-1*ineq.getConstant(),
getCurrentRate(ltPair), true);
rv2u = ContinuousUtilities.chkDiv(ineq.getConstant(),
getCurrentRate(ltPair), true);
iZ = Arrays.binarySearch(_indexToTimerPair, ltPair);
//} else {
}
else{
//iZ = getIndexZ(z,-1,e->t);
// In this case, the event is a transition.
Transition t = e.getTransition();
int lpnIndex = t.getLpn().getLpnIndex();
int tranIndex = t.getIndex();
// Package the results.
// LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, tranIndex, true);
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, tranIndex);
iZ = Arrays.binarySearch(_indexToTimerPair, ltPair);
}
//for(i=E.begin();i!=E.end()&&!done;i++) {
		// Recall that E contains the event sets, which inherit from the Transition class
		// so that they can be placed in an LpnTranList. So consider each event set.
for(Transition es : E){
if(!(es instanceof EventSet)){
// This collection should contain event sets, not transitions.
throw new IllegalArgumentException("The eventSet was a Transition object not an EventSet object.");
}
EventSet eventSet = (EventSet) es;
if(done){
// Copy any remaining sets into newE.
newE.add(eventSet.clone());
break;
}
//eventSet* workSet = new eventSet();
// /* Both actions are predicate changes */
// /* Both predicates are on the same variable */
// /* Predicates are on different variables */
// /* New action is predicate change, old is transition firing (case 3) */
// /* TODO: One more ugly case, is it needed? */
// /* New action is transition firing, old is predicate change (case 4) */
	//		/* TODO: one more ugly case, is it needed? */
			// NOTE: The logic that compares the new event with each existing
			// event set (the cases sketched in the comments above) is not
			// present in this listing; close the loop and return the
			// accumulated event sets so the method is well-formed.
		}
		return newE;
	}
/**
* Adds a set item and explicitly sets a flag to remove the next set item
* upon firing.
* @param E
* The list of current event sets.
* @param e
* The event to add.
* @param s
* The current state.
* @param removeNext
	 * 		True if, after firing this event, the next event should be removed from the
* queue.
* @return
* The new event set.
*/
public LpnTranList addSetItem(LpnTranList E, Event e, State s,
boolean removeNext){
return null;
}
/**
* Updates the continuous variables that are set by firing a transition.
* @param firedTran
* The transition that fired.
* @param s
* The current (local) state.
*/
public void updateContinuousAssignment(Transition firedTran, State s){
// Get the LPN.
LhpnFile lpn = _lpnList[firedTran.getLpn().getLpnIndex()];
// Get the current values of the (local) state.
HashMap<String,String> currentValues =
lpn.getAllVarsWithValuesAsString(s.getVariableVector());
// Get all the continuous variable assignments.
HashMap<String, ExprTree> assignTrees = firedTran.getContAssignTrees();
for(String contVar : assignTrees.keySet()){
// Get the bounds to assign the continuous variables.
IntervalPair assignment =
assignTrees.get(contVar).evaluateExprBound(currentValues, this, null);
// Make the assignment.
setContinuousBounds(contVar, lpn, assignment);
}
}
/**
* Updates the continuous variables according to the given values.
	 * @param newAssignValues
	 * 		The new values of the continuous variables.
*/
// public void updateContinuousAssignment(ArrayList<HashMap<LPNContAndRate, IntervalPair>> newAssignValues){
// public void updateContinuousAssignment(ArrayList<UpdateContinuous> newAssignValues){
public void updateContinuousAssignment(ContinuousRecordSet newAssignValues){
/*
* In dealing with the rates and continuous variables, there are four cases to consider. These cases
	 * depend on whether the old value of the 'current rate' is zero or non-zero and whether the
* new value of the 'current rate' is zero or non-zero.
* 0. old rate is zero, new rate is zero.
* Lookup the zero rate in the _rateZeroContinuous and add any new continuous assignments.
* 1. old rate is zero, new rate is non-zero.
* Remove the rate from the _rateZeroContinuous and add the zone.
* 2. old rate is non-zero, new rate is zero.
* Add the variable with its upper and lower bounds to _rateZeroContinuous.
* 3. old rate is non-zero, new rate is non-zero.
* Get the LPNContinuousPair from the _indexToTimerPair and change the value.
*
* Note: If an assignment is made to the variable, then it should be considered as a
* new variable.
*/
		// The updating of the rate-zero continuous variables is taken care of
		// by copyRates, so we just need to update the values in the zone.
		// This amounts to copying over the values from the old zone for
		// continuous variables that haven't changed and copying in the
		// newly assigned values.
// final int OLD_ZERO = 0; // Case 0 in description.
// final int NEW_NON_ZERO = 1; // Case 1 in description.
// final int NEW_ZERO = 2; // Case 2 in description.
// final int OLD_NON_ZERO = 3; // Cade 3 in description.
// HashMap<LPNContAndRate, IntervalPair> newNonZero =
// newAssignValues.get(NEW_NON_ZERO);
// HashMap<LPNContAndRate, IntervalPair> oldNonZero =
// newAssignValues.get(OLD_NON_ZERO);
// for(Entry<LPNContAndRate, IntervalPair> pair : newNonZero.entrySet()){
// // Set the lower bound.
// setDbmEntryByPair(pair.getKey()._lcPair,
// LPNTransitionPair.ZERO_TIMER_PAIR, (-1)*pair.getValue().get_LowerBound());
// // Set the upper bound.
// setDbmEntryByPair(pair.getKey().get_lcPair(),
// LPNTransitionPair.ZERO_TIMER_PAIR, pair.getValue().get_UpperBound());
// // Set the rate.
// LPNTransitionPair ltpair = pair.getKey().get_lcPair();
// int index = Arrays.binarySearch(_indexToTimerPair, ltpair);
// _matrix[dbmIndexToMatrixIndex(index)][0] = -1*pair.getKey().get_rateInterval().get_LowerBound();
// _matrix[0][dbmIndexToMatrixIndex(index)] = pair.getKey().get_rateInterval().get_UpperBound();
// for(Entry<LPNContAndRate, IntervalPair> pair : oldNonZero.entrySet()){
// // Set the lower bound.
// setDbmEntryByPair(pair.getKey().get_lcPair(),
// LPNTransitionPair.ZERO_TIMER_PAIR, (-1)*pair.getValue().get_LowerBound());
// // Set the upper bound.
// setDbmEntryByPair(LPNTransitionPair.ZERO_TIMER_PAIR, pair.getKey().get_lcPair(),
// pair.getValue().get_UpperBound());
// int index = Arrays.binarySearch(_indexToTimerPair, pair.getKey().get_lcPair());
// // Set the current rate.
// LPNTransitionPair ltPair = pair.getKey().get_lcPair();
// setCurrentRate(ltPair, pair.getKey().get_lcPair().getCurrentRate());
// // Set the upper and lower bounds for the rates.
// _matrix[dbmIndexToMatrixIndex(index)][0] = -1*pair.getKey().get_rateInterval().get_LowerBound();
// _matrix[0][dbmIndexToMatrixIndex(index)] = pair.getKey().get_rateInterval().get_UpperBound();
}
/* (non-Javadoc)
* @see verification.timed_state_exploration.zone.Zone#getLexicon()
*/
// public HashMap<Integer, Transition> getLexicon(){
// if(_indexToTransition == null){
// return null;
// return new HashMap<Integer, Transition>(_indexToTransition);
// public void setLexicon(HashMap<Integer, Transition> lexicon){
// _indexToTransition = lexicon;
/**
* Gives an array that maps the index of a timer in the DBM to the timer's index.
* @return
* The array that maps the index of a timer in the DBM to the timer's index.
*/
// public int[] getIndexToTimer(){
// return Arrays.copyOf(_indexToTimerPair, _indexToTimerPair.length);
/**
* Calculates a warping value needed to warp a Zone. When a zone is being warped the form
	 * r1*z2 - r1*z1 + r2*z1 becomes important in finding the new values of the zone.
	 *
* @param z1
* Upper bound or negative lower bound.
* @param z2
* Relative value.
* @param r1
* First ratio.
* @param r2
* Second ratio.
* @return
* r1*z2 - r1*z1 + r2*z1
*/
public int warp(int z1, int z2, int r1, int r2){
/*
* See "Verification of Analog/Mixed-Signal Circuits Using Labeled Hybrid Petri Nets"
* by S. Little, D. Walter, C. Myers, R. Thacker, S. Batchu, and T. Yoneda
* Section III.C for details on how this function is used and where it comes
* from.
*/
return r1*z2 - r1*z1 + r2*z1;
}
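	/*
	 * Worked instance of the formula above, with hypothetical values:
	 * warp(2, 5, 3, 2) = 3*5 - 3*2 + 2*2 = 15 - 6 + 4 = 13.
	 */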
/**
	 * Warps this Zone in place with the aid of rate information from the previous Zone.
	 *
	 * @param oldZone
	 * 		The previous Zone.
*/
public void dbmWarp(Zone oldZone){
/*
* See "Verification of Analog/Mixed-Signal Circuits Using Labeled Hybrid Petri Nets"
* by S. Little, D. Walter, C. Myers, R. Thacker, S. Batchu, and T. Yoneda
* Section III.C for details on how this function is used and where it comes
* from.
*/
// return null;
// void lhpnDbmWarp(lhpnStateADT s,eventADT *events,int nevents)
// #ifdef __LHPN_TRACE__
// printf("lhpnDbmWarp:begin()\n");
// #endif
// /* TODO: This appears to NOT work when INFIN is in the bounds?
// Should I have to worry about this case? */
// for(int i=1;i<s->z->dbmEnd;i++) {
// for(int j=i+1;j<s->z->dbmEnd;j++) {
// double iVal = 0.0;
// double jVal = 0.0;
// double iWarp = 0;
// double jWarp = 0;
// double iXDot = 0;
// double jXDot = 0;
// According to atacs comments, this appears to NOT work when
// INFIN is in the bounds.
// This portion of the code handles the warping of the relative
// parts of the zone.
for(int i=1; i< dbmSize(); i++){
for(int j=i+1; j<dbmSize(); j++){
double iVal, jVal, iWarp, jWarp, iXDot, jXDot;
// Note : the iVal and the jVal correspond to the
				// alpha and beta described in Scott Little's thesis.
// /* deal w/ the fact that we might have continuous and discrete
// places */
// #ifdef __LHPN_DEBUG_WARP__
// printf("Working on %d->%d\n",i,j);
// #endif
// if(s->z->curClocks[i].enabling == -2) {
// iVal = fabs((double)s->r->oldBound[s->z->curClocks[i].enabled-nevents].current /
// (double)s->r->bound[s->z->curClocks[i].enabled-nevents].current);
// iWarp = fabs((double)s->r->oldBound[s->z->curClocks[i].enabled-nevents].current);
// iXDot = fabs((double)s->r->bound[s->z->curClocks[i].enabled-nevents].current);
// Do some warping when dealing with the continuous variables.
if(_indexToTimerPair[i] instanceof LPNContinuousPair){
					// Calculate the alpha value.
iVal = Math.floor(Math.abs(
(double) oldZone.getCurrentRate(_indexToTimerPair[i]) /
(double) this.getCurrentRate(_indexToTimerPair[i])));
// The old rate the zone was warped by.
iWarp = Math.floor(Math.abs(
(double) oldZone.getCurrentRate(_indexToTimerPair[i])));
					// The current rate of this continuous variable.
iXDot = Math.floor(Math.abs(
(double) this.getCurrentRate(_indexToTimerPair[i])));
// I'm not going to do any warping when the previous rate
					// is zero. This statement is a break to go to the next i value
// and not the next j.
if(iWarp == 0){
break;
}
}
// else {
// iVal = 1.0;
// iWarp = 1.0;
// iXDot = 1.0;
else{
// The current variable is a timer, so the new rate and old rate
// are both 1. Hence we have
iVal = 1.0;
iWarp = 1.0;
iXDot = 1.0;
}
// if(s->z->curClocks[j].enabling == -2) {
// jVal = fabs((double)s->r->oldBound[s->z->curClocks[j].enabled-nevents].current /
// (double)s->r->bound[s->z->curClocks[j].enabled-nevents].current);
// jWarp = fabs((double)s->r->oldBound[s->z->curClocks[j].enabled-nevents].current);
// jXDot = fabs((double)s->r->bound[s->z->curClocks[j].enabled-nevents].current);
// Do some warping of the second variable if it is a continuous variable.
if(_indexToTimerPair[j] instanceof LPNContinuousPair){
					// Calculate the alpha value.
jVal = Math.floor(Math.abs(
(double) oldZone.getCurrentRate(_indexToTimerPair[j]) /
(double) this.getCurrentRate(_indexToTimerPair[j])));
// The old rate the zone was warped by.
jWarp = Math.floor(Math.abs(
(double) oldZone.getCurrentRate(_indexToTimerPair[j])));
// The current rate of this continuous variable.
jXDot = Math.floor(Math.abs(
(double) this.getCurrentRate(_indexToTimerPair[j])));
// I'm not going to do any warping when the previous rate is
// zero.
if(jWarp == 0){
continue;
}
}
// else {
// jVal = 1.0;
// jWarp = 1.0;
// jXDot = 1.0;
else{
// The current variable is a timer, so the new rate and old rate
// are both 1. Hence we have
jVal = 1.0;
jWarp = 1.0;
jXDot = 1.0;
}
// #ifdef __LHPN_DEBUG_WARP__
// printf("iVal: %f, jVal: %f, iWarp: %f, jWarp: %f, iXDot: %f, jXDot: %f\n",iVal,jVal,iWarp,jWarp,iXDot,jXDot);
// /* printf("calc1- jWarp:%d * s->z->matrix[i][j]:%d / jXDot:%d + (-1 * jWarp:%d * s->z->matrix[i][0]:%d) / jXDot:%d + (iWarp:%d * s->z->matrix[i][0]:%d) / iXDot:%d = %d 1:%d 2:%d 3:%d -- %d\n", jWarp,s->z->matrix[i][j],jXDot,jWarp,s->z->matrix[i][0],jXDot,iWarp,s->z->matrix[i][0],iXDot,(chkDiv((jWarp * s->z->matrix[i][j]),jXDot,'C') + chkDiv((-1 * jWarp * s->z->matrix[i][0]),jXDot,'C') + chkDiv((iWarp * s->z->matrix[i][0]),iXDot,'C')),chkDiv((jWarp * s->z->matrix[i][j]),jXDot,'C'),chkDiv((-1 * jWarp * s->z->matrix[i][0]),jXDot,'C'),chkDiv((iWarp * s->z->matrix[i][0]),iXDot,'C'),(int)ceil(((jWarp * s->z->matrix[i][j])/jXDot) +((-1 * jWarp * s->z->matrix[i][0])/jXDot) + ((iWarp * s->z->matrix[i][0])/iXDot))); */
// /* printf("calc2-jWarp:%f * s->z->matrix[j][i]):%d/jXDot:%f) + ((-1 * jWarp:%f * s->z->matrix[0][i]:%d)/jXDot:%f) + ((iWarp:%f * s->z->matrix[0][i]):%d,iXDot:%f)) = %d 1:%f 2:%f 3:%f\n",jWarp,s->z->matrix[j][i],jXDot,jWarp,s->z->matrix[0][i],jXDot,iWarp,s->z->matrix[0][i],iXDot,(int) ceil(((jWarp * s->z->matrix[j][i])/jXDot) + ((-1 * jWarp * s->z->matrix[0][i])/jXDot) + ((iWarp * s->z->matrix[0][i]),iXDot)),((jWarp * (double)s->z->matrix[j][i])/jXDot),((-1 * jWarp * (double)s->z->matrix[0][i])/jXDot),(iWarp * (double)s->z->matrix[0][i])/iXDot); */
// #endif
// if(iVal > jVal) {
// /* s->z->matrix[i][j] = */
// /* chkDiv((jWarp * s->z->matrix[i][j]),jXDot,'C') + */
// /* chkDiv((-1 * jWarp * s->z->matrix[i][0]),jXDot,'C') + */
// /* chkDiv((iWarp * s->z->matrix[i][0]),iXDot,'C'); */
// /* s->z->matrix[j][i] = */
// /* chkDiv((jWarp * s->z->matrix[j][i]),jXDot,'C') + */
// /* chkDiv((-1 * jWarp * s->z->matrix[0][i]),jXDot,'C') + */
// /* chkDiv((iWarp * s->z->matrix[0][i]),iXDot,'C'); */
// s->z->matrix[i][j] = (int)
// ceil(((jWarp * s->z->matrix[i][j])/jXDot) +
// ((-1 * jWarp * s->z->matrix[i][0])/jXDot) +
// ((iWarp * s->z->matrix[i][0])/iXDot));
// s->z->matrix[j][i] = (int)
// ceil(((jWarp * s->z->matrix[j][i])/jXDot) +
// ((-1 * jWarp * s->z->matrix[0][i])/jXDot) +
// ((iWarp * s->z->matrix[0][i])/iXDot));
				// The zone is warped differently depending on which rate is
				// larger. See Scott Little's thesis for more details.
if(iVal > jVal){
setDbmEntry(j, i, (int)
Math.ceil(((jWarp*getDbmEntry(j, i))/jXDot) +
((-1*jWarp*getDbmEntry(0, i)/jXDot)) +
((iWarp*getDbmEntry(0, i)/iXDot))));
setDbmEntry(i, j, (int)
Math.ceil(((jWarp*getDbmEntry(i, j))/jXDot) +
((-1*jWarp*getDbmEntry(i, 0)/jXDot)) +
((iWarp*getDbmEntry(i, 0)/iXDot))));
}
// else {
// /* s->z->matrix[j][i] = */
// /* chkDiv((iWarp * s->z->matrix[j][i]),iXDot,'C') + */
// /* chkDiv((-1 * iWarp * s->z->matrix[j][0]),iXDot,'C') + */
// /* chkDiv((jWarp * s->z->matrix[j][0]),jXDot,'C'); */
// /* s->z->matrix[i][j] = */
// /* chkDiv((iWarp * s->z->matrix[i][j]),iXDot,'C') + */
// /* chkDiv((-1 * iWarp * s->z->matrix[0][j]),iXDot,'C') + */
// /* chkDiv((jWarp * s->z->matrix[0][j]),jXDot,'C'); */
// s->z->matrix[j][i] = (int)
// ceil(((iWarp * s->z->matrix[j][i])/iXDot) +
// ((-1 * iWarp * s->z->matrix[j][0])/iXDot) +
// ((jWarp * s->z->matrix[j][0])/jXDot));
// s->z->matrix[i][j] = (int)
// ceil(((iWarp * s->z->matrix[i][j])/iXDot) +
// ((-1 * iWarp * s->z->matrix[0][j])/iXDot) +
// ((jWarp * s->z->matrix[0][j])/jXDot));
else{
setDbmEntry(i, j, (int)
Math.ceil(((iWarp*getDbmEntry(i, j))/iXDot) +
((-1*iWarp*getDbmEntry(0, j)/iXDot)) +
((jWarp*getDbmEntry(0, j)/jXDot))));
setDbmEntry(j, i, (int)
Math.ceil(((iWarp*getDbmEntry(j, i))/iXDot) +
((-1*iWarp*getDbmEntry(j, 0)/iXDot)) +
((jWarp*getDbmEntry(j, 0)/jXDot))));
}
}
}
// #ifdef __LHPN_DEBUG_WARP__
// printf("After fixing up initial warp conditions.\n");
// printZ(s->z,events,nevents,s->r);
// #endif
// for(int i=1;i<s->z->dbmEnd;i++) {
// if(s->z->curClocks[i].enabling == -2) {
// Handle the warping of the bounds.
for(int i=1; i<dbmSize(); i++){
if(_indexToTimerPair[i] instanceof LPNContinuousPair){
// #ifdef __LHPN_DEBUG_WARP__
// printf("old:%d new:%d v1:%d v2:%d\n",s->r->oldBound[s->z->curClocks[i].enabled-nevents].current,s->r->bound[s->z->curClocks[i].enabled-nevents].current,s->z->matrix[0][i],s->z->matrix[i][0]);
// #endif
// if(abs(s->z->matrix[0][i]) != INFIN) {
// s->z->matrix[0][i] =
// chkDiv((abs(s->r->oldBound[s->z->curClocks[i].enabled-nevents].current)
// * s->z->matrix[0][i]),
// abs(s->r->bound[s->z->curClocks[i].enabled-nevents].current)
if(Math.abs(getDbmEntry(i, 0)) != INFINITY ){
if(oldZone.getCurrentRate(_indexToTimerPair[i]) == 0){
// If the older rate was zero, then we just need to
// divide by the new rate.
setDbmEntry(i, 0, ContinuousUtilities.chkDiv(
getDbmEntry(i,0),
Math.abs(getCurrentRate(_indexToTimerPair[i])),
true));
}
else{
// Undo the old warping and introduce the new warping.
// If the bound is infinite, then division does nothing.
setDbmEntry(i, 0, ContinuousUtilities.chkDiv(
Math.abs(oldZone.getCurrentRate(_indexToTimerPair[i]))
* getDbmEntry(i, 0),
Math.abs(getCurrentRate(_indexToTimerPair[i])),
true));
}
}
// if(abs(s->z->matrix[i][0]) != INFIN) {
// s->z->matrix[i][0] =
// chkDiv((abs(s->r->oldBound[s->z->curClocks[i].enabled-nevents].current)
// * s->z->matrix[i][0]),
// abs(s->r->bound[s->z->curClocks[i].enabled-nevents].current)
if(Math.abs(getDbmEntry(0, i)) != INFINITY){
if(oldZone.getCurrentRate(_indexToTimerPair[i]) == 0){
setDbmEntry(0, i, ContinuousUtilities.chkDiv(
getDbmEntry(0,i),
Math.abs(getCurrentRate(_indexToTimerPair[i])),
true));
}
else{
// Undo the old warping and introduce the new warping.
						// If the bound is infinite, then division does nothing.
setDbmEntry(0, i, ContinuousUtilities.chkDiv(
Math.abs(oldZone.getCurrentRate(_indexToTimerPair[i]))
* getDbmEntry(0, i),
Math.abs(getCurrentRate(_indexToTimerPair[i])),
true));
}
}
}
}
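		// Illustrative numbers for the bound warping above (assuming the
		// boolean flag on chkDiv selects ceiling division): with an old rate
		// of 2, a new rate of 4 and a stored bound entry of 10, the entry
		// becomes chkDiv(2*10, 4, true) = 5; had the old rate been 0, the
		// entry would simply be divided by the new rate, chkDiv(10, 4, true) = 3.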
// #ifdef __LHPN_DEBUG_WARP__
// printf("After fixing up places.\n");
// printZ(s->z,events,nevents,s->r);
// #endif
// for(int i=1;i<s->z->dbmEnd;i++) {
// if(s->z->curClocks[i].enabling == -2) {
for(int i=1; i<dbmSize(); i++){
if(_indexToTimerPair[i] instanceof LPNContinuousPair){
// #ifdef __LHPN_DEBUG_WARP__
// printf("Warp: %d\n",s->r->oldBound[s->z->curClocks[i].enabled-nevents].current);
// #endif
// if(((float)s->r->oldBound[s->z->curClocks[i].enabled-nevents].current /
// (float)s->r->bound[s->z->curClocks[i].enabled-nevents].current) < 0.0) {
// /* swap */
// int temp = s->z->matrix[0][i];
// s->z->matrix[0][i] = s->z->matrix[i][0];
// s->z->matrix[i][0] = temp;
// for(int j=1;j<s->z->dbmEnd;j++) {
// /* TBD: If i & j are both changing direction do we need to
// remove the warp info? */
// if(i != j) {
// s->z->matrix[j][i] = INFIN;
// s->z->matrix[i][j] = INFIN;
// Handle the case when the warping takes us into negative space.
if((double) oldZone.getCurrentRate(_indexToTimerPair[i])/
(double) this.getCurrentRate(_indexToTimerPair[i]) < 0.0){
/* We are warping into the negative space, so swap the upper and
* lower bounds.
*/
int temp = getDbmEntry(i, 0);
setDbmEntry(i,0, getDbmEntry(0, i));
setDbmEntry(0, i, temp);
// Set the relationships to Infinity since nothing else is known.
for(int j=1; j<dbmSize(); j++){
if(i != j){
setDbmEntry(i, j, INFINITY);
setDbmEntry(j, i, INFINITY);
}
}
}
}
}
// #ifdef __LHPN_DEBUG_WARP__
// printf("After handling negative warps.\n");
// printZ(s->z,events,nevents,s->r);
// #endif
// for(int i=1;i<s->z->dbmEnd;i++) {
// if(s->z->curClocks[i].enabling == -2) {
// for(int i=1; i<dbmSize(); i++){
// if(_indexToTimerPair[i] instanceof LPNContinuousPair){
// int newCwarp = s->r->bound[s->z->curClocks[i].enabled-nevents].current;
// int newLwarp = s->r->bound[s->z->curClocks[i].enabled-nevents].lower;
// int newUwarp = s->r->bound[s->z->curClocks[i].enabled-nevents].upper;
// s->r->oldBound[s->z->curClocks[i].enabled-nevents].current = newCwarp;
// s->r->oldBound[s->z->curClocks[i].enabled-nevents].lower = newLwarp;
// s->r->oldBound[s->z->curClocks[i].enabled-nevents].upper = newUwarp;
// #ifdef __LHPN_DEBUG_WARP__
// printf("New warp for %d: %d\n",i,s->r->oldBound[s->z->curClocks[i].enabled-nevents].current);
// #endif
/* Due to the nature of how I store things, I do not think I need to do
* this portion.
*/
// #ifdef __LHPN_DEBUG_WARP__
// printf("Before recanon.\n");
// printZ(s->z,events,nevents,s->r);
// #endif
// recanonZ(s->z);
// #ifdef __LHPN_DEBUG_WARP__
// printf("After recanon.\n");
// printZ(s->z,events,nevents,s->r);
// #endif
}
/**
 * The DiagonalNonZeroException extends java.lang.RuntimeException.
 * The intention is for this exception to be thrown if a Zone has a non-zero
 * entry appear on the diagonal.
*
* @author Andrew N. Fisher
*
*/
public class DiagonalNonZeroException extends java.lang.RuntimeException
{
/**
* Generated serialVersionUID.
*/
private static final long serialVersionUID = -3857736741611605411L;
/**
* Creates a DiagonalNonZeroException.
* @param Message
* The message to be displayed when the exception is thrown.
*/
public DiagonalNonZeroException(String Message)
{
super(Message);
}
}
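// A minimal sketch (not from the original code; the check below is an assumption
// about how the exception might be used) of detecting a bad diagonal entry:
//   for(int k=0; k<dbmSize(); k++){
//       if(getDbmEntry(k, k) != 0){
//           throw new DiagonalNonZeroException("Nonzero diagonal entry at index " + k);
//       }
//   }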
/**
* This exception is thrown when trying to merge two zones whose corresponding timers
* do not agree.
* @author Andrew N. Fisher
*
*/
// public class IncompatibleZoneException extends java.lang.RuntimeException
// // TODO : Check if this class can be removed.
// /**
// * Generated serialVersionUID
// */
// private static final long serialVersionUID = -2453680267411313227L;
// public IncompatibleZoneException(String Message)
// super(Message);
/**
* Clears out the lexicon.
*/
// public static void clearLexicon(){
// _indexToTransition = null;
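/**
 * Parses a rate or value string into an IntervalPair. A plain integer such as
 * "5" yields the pair [5,5], while an interval written as "[2,3]" yields [2,3].
 * @param rate
 * The string to parse: either a single integer or an interval of the form
 * "[lower,upper]".
 * @return
 * The corresponding IntervalPair.
 */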
private IntervalPair parseRate(String rate){
String rateNoSpaces = rate.trim();
// First check if the string is a single number.
// Integer i = Integer.parseInt(rate);
// if(i != null){
// // The string is a number, so set the upper and lower bounds equal.
// return new IntervalPair(i,i);
// First check for a comma (representing an interval input).
int commaIndex = rateNoSpaces.indexOf(",");
if(commaIndex < 0){
// Assume that the string is a constant. A NumberFormatException
// will be thrown otherwise.
int i = Integer.parseInt(rateNoSpaces);
return new IntervalPair(i,i);
}
String lowerString = rateNoSpaces.substring(1, commaIndex).trim();
String upperString = rateNoSpaces.substring(commaIndex+1,
rateNoSpaces.length()-1).trim();
return new IntervalPair(Integer.parseInt(lowerString),
Integer.parseInt(upperString));
}
/**
* Get the list of LhpnFile objects that this Zone depends on.
* @return
 * The list of LhpnFile objects that this Zone depends on.
*/
public LhpnFile[] get_lpnList(){
return _lpnList;
}
/**
* Performs a division of two integers and either takes the ceiling or the floor. Note :
* The integers are converted to doubles for the division so the choice of ceiling or floor is
* meaningful.
* @param top
* The numerator.
* @param bottom
* The denominator.
* @param ceil
* True indicates return the ceiling and false indicates return the floor.
* @return
* Returns the ceiling of top/bottom if ceil is true and the floor of top/bottom otherwise.
*/
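// Illustrative values, assuming ContinuousUtilities.chkDiv keeps the semantics of
// the commented-out implementation below:
//   chkDiv(7, 2, true)  -> 4   (ceiling of 3.5)
//   chkDiv(7, 2, false) -> 3   (floor of 3.5)
//   chkDiv(INFINITY, -3, true) -> -INFINITY  (infinity keeps its magnitude, the sign flips)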
// public int chkDiv(int top, int bottom, Boolean ceil){
// /*
// * This method was taken from atacs/src/hpnrsg.c
// */
// int res = 0;
// if(top == INFINITY ||
// top == INFINITY * -1) {
// if(bottom < 0) {
// return top * -1;
// return top;
// if(bottom == INFINITY) {
// return 0;
// if(bottom == 0) {
// System.out.println("Warning: Divided by zero.");
// bottom = 1;
// double Dres,Dtop,Dbottom;
// Dtop = top;
// Dbottom = bottom;
// Dres = Dtop/Dbottom;
// if(ceil) {
// res = (int)Math.ceil(Dres);
// else if(!ceil) {
// res = (int)Math.floor(Dres);
// return res;
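/**
 * Gets the current rate of a continuous variable. Variables that are not found
 * among the non-zero rate variables tracked by the DBM are assumed to have rate
 * zero.
 * @param contVar
 * The continuous variable of interest; must be an LPNContinuousPair.
 * @return
 * The current rate of the variable, or 0 if it is not in the DBM portion.
 */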
public int getCurrentRate(LPNTransitionPair contVar){
if(!(contVar instanceof LPNContinuousPair)){
// The LPNTransitionsPair does not refer to a continuous variable, so yell.
throw new IllegalArgumentException("Zone.getCurrentRate was called" +
" on an LPNTransitionPair that was not an LPNContinuousPair.");
}
LPNContinuousPair cV = (LPNContinuousPair) contVar;
// Search for the pair in the zone.
int index = Arrays.binarySearch(_indexToTimerPair, cV);
if(index >0){
// The continuous variable was found amongst the non zero rate continuous variables.
// Grab that indexing object instead since it has the rate.
cV = (LPNContinuousPair) _indexToTimerPair[index];
return cV.getCurrentRate();
}
else{
// Since the variable was not found in the non-zero rate continuous
// variables, assume the rate is zero.
return 0;
}
}
/**
* Sets the current rate for a continuous variable. It sets the rate regardless of
* whether the variable is in the rate zero portion of the Zone or not. But it
* does not move variables in and out of the zone.
* @param contVar
* The index of the variable whose rate is going to be set.
* @param currentRate
* The value of the rate.
*/
public void setCurrentRate(LPNTransitionPair contVar, int currentRate){
if(!(contVar instanceof LPNContinuousPair)){
// The LPNTransitionsPair does not refer to a continuous variable, so yell.
throw new IllegalArgumentException("Zone.getCurrentRate was called" +
" on an LPNTransitionPair that was not an LPNContinuousPair.");
}
LPNContinuousPair cV = (LPNContinuousPair) contVar;
// Check for the current variable in the rate zero variables.
VariableRangePair variableRange = _rateZeroContinuous.
getValue(new LPNContAndRate(cV, new IntervalPair(0,0)));
if(variableRange != null){
LPNContinuousPair lcPair = (LPNContinuousPair)_rateZeroContinuous.
getKey(variableRange).get_lcPair();
lcPair.setCurrentRate(currentRate);
return;
}
// Check for the current variable in the Zone variables.
int index = Arrays.binarySearch(_indexToTimerPair, contVar);
if(index >= 0){
// The variable was found, set the rate.
LPNContinuousPair lcPair = (LPNContinuousPair) _indexToTimerPair[index];
lcPair.setCurrentRate(currentRate);
}
}
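// A minimal usage sketch (contVar is assumed to be an LPNContinuousPair already
// tracked by this zone):
//   zone.setCurrentRate(contVar, 3);
//   int r = zone.getCurrentRate(contVar);  // r == 3 once the rate has been recorded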
/**
* Adds a transition to a zone.
 * @param newTransitions
 * The newly enabled transitions.
 * @param localStates
 * The current local states of the LPNs.
 * @return
 * The result of adding the transitions.
*/
public Zone addTransition(HashSet<LPNTransitionPair> newTransitions, State[] localStates){
/*
* The zone will remain the same for all the continuous variables.
 * The only thing that will change is that the new transitions will be added into the set of transitions.
*/
// Create a Zone to alter.
Zone newZone = new Zone();
// Create a copy of the LPN list.
newZone._lpnList = Arrays.copyOf(this._lpnList, this._lpnList.length);
// Copy the rate zero continuous variables.
newZone._rateZeroContinuous = this._rateZeroContinuous.clone();
// Create a copy of the current indexing pairs.
//newZone._indexToTimerPair = Arrays.copyOf(_indexToTimerPair, _indexToTimerPair.length);
newZone._indexToTimerPair = new LPNTransitionPair[_indexToTimerPair.length + newTransitions.size()];
for(int i=0; i<_indexToTimerPair.length; i++){
newZone._indexToTimerPair[i] = _indexToTimerPair[i];
}
// Add the new transitions to the _indexToTimerPair list.
// for(int i=_indexToTimerPair.length; i<newZone._indexToTimerPair.length; i++){
// // Set up the index for the newTransitions list.
// int newTransitionIndex = i-_indexToTimerPair.length;
// newZone._indexToTimerPair[i] = newTransitions[newTransitionIndex];
int i = _indexToTimerPair.length;
for(LPNTransitionPair ltPair : newTransitions){
newZone._indexToTimerPair[i++] = ltPair;
}
// Sort the _indexToTimerPair list.
Arrays.sort(newZone._indexToTimerPair);
// Create matrix.
newZone._matrix = new int[newZone._indexToTimerPair.length+1][newZone._indexToTimerPair.length+1];
// Convert the current transitions to a collection of transitions.
HashSet<LPNTransitionPair> oldTransitionSet = new HashSet<LPNTransitionPair>();
for(LPNTransitionPair ltPair : _indexToTimerPair){
oldTransitionSet.add(ltPair);
}
// Copy in the new transitions.
newZone.copyTransitions(this, newTransitions, oldTransitionSet, localStates);
// newZone.advance(localStates);
// newZone.recononicalize();
return newZone;
}
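// A minimal usage sketch (the pair's constructor arguments mirror how timer pairs
// are built elsewhere in this class; the surrounding variable names are assumptions):
//   HashSet<LPNTransitionPair> newlyEnabled = new HashSet<LPNTransitionPair>();
//   newlyEnabled.add(new LPNTransitionPair(lpnIndex, transitionIndex, true));
//   Zone extended = currentZone.addTransition(newlyEnabled, localStates);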
/**
* This method creates a zone identical to the current zone except all the current rates are turned to 1.
* This is to provide a previous zone to the initial zone for warping.
* @return
* A zone identical to this zone with all rates set to 1.
*/
private Zone beforeInitialZone(){
Zone z = this.clone();
// for(int i=1; _indexToTimerPair[i] instanceof LPNContinuousPair; i++){
for(int i=1; i<z._indexToTimerPair.length; i++){
if(!(z._indexToTimerPair[i] instanceof LPNContinuousPair)){
break;
}
LPNContinuousPair lcPair = (LPNContinuousPair) z._indexToTimerPair[i];
lcPair.setCurrentRate(1);
}
return z;
}
/**
 * Returns a new Zone that has the given continuous variable, previously kept
 * with the rate zero variables, added into the DBM portion.
 * @param ltContPair The continuous variable to move from the rate zero
 * variables.
* @return The resulting Zone.
*/
public Zone moveOldRateZero(LPNContinuousPair ltContPair) {
// Create a Zone to alter.
Zone newZone = new Zone();
// Create a copy of the LPN list.
newZone._lpnList = Arrays.copyOf(this._lpnList, this._lpnList.length);
// Copy the rate zero continuous variables.
newZone._rateZeroContinuous = this._rateZeroContinuous.clone();
// Extract the continuous variable from the rate zero variables.
LPNContAndRate rateZero = new LPNContAndRate(ltContPair,
new IntervalPair(0,0));
// This gets the values for the continuous variable.
VariableRangePair vrp = newZone._rateZeroContinuous.get(rateZero);
IntervalPair values = vrp.get_range();
// This replaces the rateZero with the one stored in the _rateZeroContinuous.
// The purpose of this is to obtain the stored range of rates.
rateZero = newZone._rateZeroContinuous.getKey(vrp);
// Get the range of rates.
IntervalPair rangeOfRates = rateZero.get_rateInterval();
// Remove the continuous variable.
newZone._rateZeroContinuous.delete(rateZero);
// Create a copy of the current indexing pairs.
//newZone._indexToTimerPair = Arrays.copyOf(_indexToTimerPair, _indexToTimerPair.length);
newZone._indexToTimerPair = new LPNTransitionPair[_indexToTimerPair.length + 1];
for(int i=0; i<_indexToTimerPair.length; i++){
newZone._indexToTimerPair[i] = _indexToTimerPair[i];
}
// Add the continuous variable to the list of variables/transition in the DBM.
int numOfTransitions = _indexToTimerPair.length;
newZone._indexToTimerPair[numOfTransitions] = ltContPair;
// Sort the _indexToTimerPair list.
Arrays.sort(newZone._indexToTimerPair);
// Create matrix.
newZone._matrix = new int[newZone._indexToTimerPair.length+1][newZone._indexToTimerPair.length+1];
// Convert the current transitions to a collection of transitions.
HashSet<LPNTransitionPair> oldTransitionSet = new HashSet<LPNTransitionPair>();
for(LPNTransitionPair ltPair : _indexToTimerPair){
oldTransitionSet.add(ltPair);
}
// Copy in the new transitions.
newZone.copyTransitions(this, new HashSet<LPNTransitionPair>(),
oldTransitionSet, null);
// Get the index for the variable.
int index = Arrays.binarySearch(newZone._indexToTimerPair, ltContPair);
// Copy in the upper and lower bound of the old rate zero variable.
// newZone.setLowerBoundByLPNTransitionPair(ltContPair,
// rangeOfRates.get_LowerBound());
// newZone.setUpperBoundByLPNTransitionPair(ltContPair,
// rangeOfRates.get_UpperBound());
// Copy in the range of rates.
newZone.setLowerBoundbydbmIndex(index, rangeOfRates.get_LowerBound());
newZone.setUpperBoundbydbmIndex(index, rangeOfRates.get_UpperBound());
if(ltContPair.getCurrentRate()>0){
// Set the upper and lower bounds.
newZone.setDbmEntry(0, index,
ContinuousUtilities.chkDiv(values.get_UpperBound(),
ltContPair.getCurrentRate(), true));
newZone.setDbmEntry(index, 0,
ContinuousUtilities.chkDiv(-1*values.get_LowerBound(),
ltContPair.getCurrentRate(), true));
}
else{
// Set the upper and lower bounds. Since the rate is negative
// we swap the real upper and lower bounds.
newZone.setDbmEntry(0, index,
ContinuousUtilities.chkDiv(values.get_LowerBound(),
ltContPair.getCurrentRate(), true));
newZone.setDbmEntry(index, 0,
ContinuousUtilities.chkDiv(-1*values.get_UpperBound(),
ltContPair.getCurrentRate(), true));
}
// Set the DBM to having no relating information for how this
// variable relates to the other variables.
for(int i=1; i<newZone._indexToTimerPair.length; i++){
if(i == index){
continue;
}
else{
newZone.setDbmEntry(index, i, Zone.INFINITY);
newZone.setDbmEntry(i, index, Zone.INFINITY);
}
}
// newZone.advance(localStates);
newZone.recononicalize();
return newZone;
}
/**
* Determines whether time has advanced far enough for an inequality to change
* truth value.
* @param ineq
* The inequality to test whether its truth value can change.
* @param localState
* The state associated with the inequality.
* @return
* True if the inequality can change truth value, false otherwise.
*/
// private boolean inequalityCanChange(InequalityVariable ineq, State[] localStates){
// private boolean inequalityCanChange(InequalityVariable ineq, State localState){
// // Find the index of the continuous variable this inequality refers to.
// // I'm assuming there is a single variable.
// LhpnFile lpn = ineq.get_lpn();
// Variable contVar = ineq.getInequalities().get(0);
// DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
// int contIndex = variableIndecies.get(contVar);
// // Package up the information into a the index. Note the current rate doesn't matter.
// LPNContinuousPair index = new LPNContinuousPair(lpn.getLpnIndex(), contIndex, 0);
// // Get the current rate.
// int currentRate = getCurrentRate(index);
// // Get the current value of the inequality. This requires looking into the current state.
//// int currentValue = localStates[lpn.getLpnIndex()].getCurrentValue(contIndex);
// int currentValue = localState.getCurrentValue(contIndex);
// // Get the Zone index of the variable.
// int zoneIndex = Arrays.binarySearch(_indexToTimerPair, index);
//// bool lhpnPredCanChange(ineqADT ineq,lhpnZoneADT z,lhpnRateADT r,
//// lhpnMarkingADT m,eventADT *events,int nevents,
//// lhpnStateADT s)
////ineq_update(ineq,s,nevents);
////#ifdef __LHPN_TRACE__
////printf("lhpnPredCanChange:begin()\n");
////#endif
////#ifdef __LHPN_PRED_DEBUG__
////printf("lhpnPredCanChange Examining: ");
////printI(ineq,events);
////printf("signal = %c, %d",s->m->state[ineq->signal],r->bound[ineq->place-nevents].current);
////printf("\n");
////if (r->bound[ineq->place-nevents].current != 0)
////printf("divRes: %d\n",chkDiv(ineq->constant,
//// r->bound[ineq->place-nevents].current,'F'));
////#endif
////
////if(ineq->type == 0 || ineq->type == 1) {
// if(ineq.get_op().contains(">")){
////int zoneP = getIndexZ(z,-2,ineq->place);
////if(zoneP == -1) {
////warn("An inequality produced a place not in the zone.");
////return false;
////if(r->bound[ineq->place-nevents].current < 0 &&
////m->state[ineq->signal] == '1') {
// // First check cases when the rate is negative.
// if(currentRate < 0 && currentValue != 0){
////if((-1)*z->matrix[zoneP][0] <=
//// (-1)*chkDiv(ineq->constant,r->bound[ineq->place-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("predCanChange:1\n");
//// printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
//// return true;
// if((-1) * getDbmEntry(0, zoneIndex) <=
// (-1)*chkDiv(ineq.getConstant(), currentRate, false)){
// return true;
////} else {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("predCannotChange:1\n");
//// printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
//// return false;
// else{
// return false;
////else if(r->bound[ineq->place-nevents].current > 0 &&
//// m->state[ineq->signal] == '0') {
// else if(currentRate > 0 && currentValue == 0){
////if(z->matrix[zoneP][0] >=
//// chkDiv(ineq->constant,r->bound[ineq->place-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("predCanChange:2\n");
//// printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
//// return true;
// if(getDbmEntry(0, zoneIndex) <=
// chkDiv(ineq.getConstant(), currentRate, false)){
// return true;
////} else {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("predCannotChange:2\n");
//// printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
////return false;
// else{
// return false;
////else {
////#ifdef __LHPN_PRED_DEBUG__
////printf("predCannotChange:3\n");
////printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
////return false;
////
////else if(ineq->type == 2 || ineq->type == 3) {
// else if(ineq.get_op().contains("<")){
////int zoneP = getIndexZ(z,-2,ineq->place);
////if(zoneP == -1) {
////warn("An inequality produced a place not in the zone.");
////return false;
////if(r->bound[ineq->place-nevents].current < 0 &&
////m->state[ineq->signal] == '0') {
// if(currentRate < 0 && currentValue == 0){
////if((-1)*z->matrix[zoneP][0] <=
//// (-1)*chkDiv(ineq->constant,r->bound[ineq->place-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("predCanChange:4\n");
//// printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
//// return true;
// if((-1) * getDbmEntry(0, zoneIndex) <=
// (-1)*chkDiv(ineq.getConstant(), currentRate, false)){
// return true;
////} else {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("predCannotChange:4\n");
//// printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
//// return false;
// else{
// return false;
////else if(r->bound[ineq->place-nevents].current > 0 &&
//// m->state[ineq->signal] == '1') {
// else if (currentRate > 0 &&
// currentValue != 0){
////if(z->matrix[zoneP][0] >=
//// chkDiv(ineq->constant,r->bound[ineq->place-nevents].current,'F')) {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("predCanChange:5\n");
//// printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
//// return true;
// if(getDbmEntry(0, zoneIndex) >=
// chkDiv(ineq.getConstant(), currentRate, false)){
// return true;
////} else {
////#ifdef __LHPN_PRED_DEBUG__
//// printf("predCannotChange:5\n");
//// printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
//// return false;
// else {
// return false;
////else {
////#ifdef __LHPN_PRED_DEBUG__
////printf("predCanChange:6\n");
////printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
////return false;
// else {
// return false;
////#ifdef __LHPN_PRED_DEBUG__
////printf("predCanChange:7\n");
////printf("rate: %d state: %c\n",r->bound[ineq->place-nevents].current,
//// m->state[ineq->signal]);
////#endif
////return false;
// return false;
}
|
package org.obeonetwork.m2doc.provider.test;
import java.util.List;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Platform;
import org.junit.Before;
import org.junit.Test;
import org.obeonetwork.m2doc.provider.DeclaredProviderListener;
import org.obeonetwork.m2doc.test.ErrorLogListener;
import org.obeonetwork.m2doc.test.M2DocTestPlugin;
import static org.junit.Assert.assertTrue;
/**
* {@link DeclaredProviderListener} test class.
*
* @author pguilet<pierre.guilet@obeo.fr>
*/
public class DeclaredProviderListenerTest {
/**
 * Listener that catches exceptions that would be put in the error log.
*/
private ErrorLogListener errorLogListener;
@Before
public void setUp() {
org.junit.Assume.assumeTrue(Platform.isRunning());
errorLogListener = M2DocTestPlugin.getDefault().getErrorLogListener();
Platform.addLogListener(errorLogListener);
}
/**
* Tests that the {@link DeclaredProviderListener} initialization has logged an error in the error log.
*/
@Test
public void listenerInitializedErrorTest() {
assertTrue(findErrorWithMessage(errorLogListener.getAllStatus(),
"Problem while registering M2Doc Providers : Plug-in org.obeonetwork.m2doc.tests was unable to load class org.obeonetwork.m2doc.test.Wrong."));
assertTrue(findErrorWithMessage(errorLogListener.getAllStatus(),
"Problem while registering M2Doc Providers : the provider \"org.obeonetwork.m2doc.provider.test.TestDiagramProvider\" is already registered. The current implementation will not be used."));
}
/**
* Returns true if the given message is found in a status. False otherwise.
*
* @param allStatus
* all the status logged.
* @param errorMessage
* the message we want to find in a status.
* @return true if the given message is found in a status. False otherwise.
*/
private boolean findErrorWithMessage(List<IStatus> allStatus, String errorMessage) {
boolean found = false;
for (IStatus iStatus : allStatus) {
if (errorMessage.equals(iStatus.getMessage())) {
found = true;
}
}
return found;
}
}
|
package org.vast.physics;
public class TimeExtent
{
protected double baseTime = Double.NaN;
protected double timeBias = 0;
protected double timeStep = 0;
protected double leadTimeDelta = 0;
protected double lagTimeDelta = 0;
protected boolean baseAtNow; // if true, baseTime is associated with the machine clock
protected boolean endNow; // if true, stopTime is associated with the machine clock
protected boolean beginNow; // if true, startTime is associated with the machine clock
public TimeExtent()
{
}
public TimeExtent(double baseJulianTime)
{
this.baseTime = baseJulianTime;
}
public TimeExtent copy()
{
TimeExtent timeExtent = new TimeExtent();
timeExtent.baseTime = this.getBaseTime();
timeExtent.timeBias = this.timeBias;
timeExtent.timeStep = this.timeStep;
timeExtent.leadTimeDelta = this.leadTimeDelta;
timeExtent.lagTimeDelta = this.lagTimeDelta;
timeExtent.baseAtNow = this.baseAtNow;
timeExtent.endNow = this.endNow;
timeExtent.beginNow = this.beginNow;
return timeExtent;
}
public TimeExtent(double baseJulianTime, double timeBiasSeconds, double timeStepSeconds, double leadTimeDeltaSeconds, double lagTimeDeltaSeconds)
{
this.baseTime = baseJulianTime;
this.timeBias = timeBiasSeconds;
this.timeStep = timeStepSeconds;
this.leadTimeDelta = Math.abs(leadTimeDeltaSeconds);
this.lagTimeDelta = Math.abs(lagTimeDeltaSeconds);
}
public void setBaseTime(double baseJulianTime)
{
this.baseTime = baseJulianTime;
}
public void setTimeBias(double seconds)
{
this.timeBias = seconds;
}
public void setTimeStep(double seconds)
{
this.timeStep = seconds;
}
public void setLeadTimeDelta(double seconds)
{
this.leadTimeDelta = Math.abs(seconds);
}
public void setLagTimeDelta(double seconds)
{
this.lagTimeDelta = Math.abs(seconds);
}
public void setDeltaTimes(double leadDeltaSeconds, double lagDeltaSeconds)
{
this.leadTimeDelta = Math.abs(leadDeltaSeconds);
this.lagTimeDelta = Math.abs(lagDeltaSeconds);
}
/**
 * To get baseTime without bias applied.
 * @return the base time without the bias applied.
*/
public double getBaseTime()
{
return baseTime;
}
/**
 * To get baseTime or absTime with bias applied.
 * @return the base time with the bias applied.
*/
public double getAdjustedTime()
{
return (getBaseTime() + timeBias);
}
public double getTimeBias()
{
return timeBias;
}
public double getTimeStep()
{
return timeStep;
}
public double getLeadTimeDelta()
{
return leadTimeDelta;
}
public double getLagTimeDelta()
{
return lagTimeDelta;
}
public double getTimeRange()
{
return (getAdjustedLeadTime() - getAdjustedLagTime());
}
public double getAdjustedLeadTime()
{
return (getBaseTime() + timeBias + leadTimeDelta);
}
public double getAdjustedLagTime()
{
return (getBaseTime() + timeBias - lagTimeDelta);
}
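/*
 * Example with assumed values: baseTime = 50, timeBias = 2, leadTimeDelta = 5 and
 * lagTimeDelta = 3 give getAdjustedLeadTime() = 57, getAdjustedLagTime() = 49 and
 * getTimeRange() = 8.
 */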
public boolean isBaseAtNow()
{
return baseAtNow;
}
public void setBaseAtNow(boolean baseAtNow)
{
this.baseAtNow = baseAtNow;
}
public boolean isBeginNow()
{
return beginNow;
}
public void setBeginNow(boolean beginNow)
{
this.beginNow = beginNow;
}
public boolean isEndNow()
{
return endNow;
}
public void setEndNow(boolean endNow)
{
this.endNow = endNow;
}
/**
 * Returns the number of full time steps in the time range.
 * @return the number of full time steps.
*/
public int getNumberOfSteps()
{
if (timeStep == 0.0)
return 1;
else
return (int) ((getAdjustedLeadTime() - getAdjustedLagTime()) / timeStep);
}
/**
* Calculates times based on current time settings, always assuring
* that both endpoints are included even if an uneven time step occurs
* at the end
*/
public double[] getTimes()
{
double time = getAdjustedLeadTime();
double lagTime = getAdjustedLagTime();
// if step is 0 returns two extreme points
if (timeStep == 0.0)
{
return new double[] {time, lagTime};
}
double timeRange = Math.abs(time - lagTime);
double remainder = timeRange % timeStep;
int steps = (int) (timeRange / timeStep) + 1;
double[] times;
if (remainder != 0.0)
{
times = new double[steps + 1];
times[steps] = lagTime;
}
else
times = new double[steps];
for (int i = 0; i < steps; i++)
times[i] = time - i * timeStep;
return times;
}
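/*
 * Illustrative sketch with assumed settings: baseTime = 100, timeBias = 0,
 * leadTimeDelta = 10, lagTimeDelta = 0 and timeStep = 3. getTimes() walks back
 * from the adjusted lead time (110) in steps of 3 and appends the lag endpoint,
 * giving {110.0, 107.0, 104.0, 101.0, 100.0}.
 */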
public String toString()
{
String tString = new String("TimeExtent:");
tString += "\n baseTime = " + baseTime;
tString += "\n timeBias = " + timeBias;
tString += "\n timeStep = " + timeStep;
tString += "\n leadTimeDelta = " + leadTimeDelta;
tString += "\n lagTimeDelta = " + lagTimeDelta;
return tString;
}
/**
 * Tests if the time ranges are equal.
 * @param timeExtent the time extent to compare against.
 * @return true if the adjusted lead and lag times are equal, false otherwise.
*/
public boolean compareTimeRange(TimeExtent timeExtent)
{
if (this.getAdjustedLagTime() != timeExtent.getAdjustedLagTime())
return false;
if (this.getAdjustedLeadTime() != timeExtent.getAdjustedLeadTime())
return false;
return true;
}
/**
* Checks if this timeExtent contains the given timeExtent
 * @param timeExtent the time extent to test.
 * @return true if this timeExtent contains the given timeExtent, false otherwise.
*/
public boolean contains(TimeExtent timeExtent)
{
double thisLag = this.getAdjustedLagTime();
double thisLead = this.getAdjustedLeadTime();
double otherLag = timeExtent.getAdjustedLagTime();
double otherLead = timeExtent.getAdjustedLeadTime();
if (otherLag < thisLag)
return false;
if (otherLag > thisLead)
return false;
if (otherLead < thisLag)
return false;
if (otherLead > thisLead)
return false;
return true;
}
/**
* Checks if this timeExtent intersects the given timeExtent
 * @param timeExtent the time extent to test.
 * @return true if this timeExtent intersects the given timeExtent, false otherwise.
*/
public boolean intersects(TimeExtent timeExtent)
{
double thisLag = this.getAdjustedLagTime();
double thisLead = this.getAdjustedLeadTime();
double otherLag = timeExtent.getAdjustedLagTime();
double otherLead = timeExtent.getAdjustedLeadTime();
if (otherLag > thisLag && otherLag < thisLead)
return true;
if (otherLead > thisLag && otherLead < thisLead)
return true;
return false;
}
}
|
package verification.timed_state_exploration.zoneProject;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import lpn.parser.ExprTree;
import lpn.parser.LhpnFile;
import lpn.parser.Transition;
import lpn.parser.Variable;
import verification.platu.lpn.DualHashMap;
import verification.platu.lpn.LpnTranList;
import verification.platu.stategraph.State;
/**
* This class is for storing and manipulating timing zones via difference bound matrices.
* The underlying structure is backed by a two dimensional array. A difference bound
* matrix has the form
* t0 t1 t2 t3
* t0 m00 m01 m02 m03
* t1 m10 m11 m12 m13
* t2 m20 m21 m22 m23
* t3 m30 m31 m32 m33
 * where tj - ti <= mij. In particular, m0k is an upper bound for tk and -mk0 is a lower
* bound for tk.
*
* The timers are referred to by an index.
*
* This class also contains a public nested class DiagonalNonZeroException which extends
* java.lang.RuntimeException. This exception may be thrown if the diagonal entries of a
* zone become nonzero.
*
* @author Andrew N. Fisher
*
*/
public class Zone{
// Abstraction Function :
// The difference bound matrix is represented by int[][].
// In order to keep track of the upper and lower bounds of timers from when they are first
// enabled, the matrix will be augmented by a row and a column. The first row will contain
// the upper bounds and the first column will contain the negative of the lower bounds.
// For one timer t1 that is between 2 and 3, we might have
// lb t0 t1
// ub x 0 3
// t0 0 m m
// t1 -2 m m
// where x is not important (and will be given a zero value), 3 is the upper bound on t1
// and -2 is the negative of the lower bound. The m values represent the actual difference
// bound matrix. Also note that the column headings are not part of the stored representation;
// lb stands for lower bound while ub stands for upper bound.
// This upper and lower bound information is called the Delay for a Transition object.
// Since a timer is tied directly to a Transition, the timers are indexed by the corresponding
// Transition's index in an LPNTranslator.
// The timers are named by an integer referred to as the index. The _indexToTimer array
// connects the index in the DBM sub-matrix to the index of the timer.
// Representation invariant :
// Zones are immutable.
// Integer.MAX_VALUE is used to logically represent infinity.
// The lb and ub values for a timer should be set when the timer is enabled.
// A negative hash code indicates that the hash code has not been set.
// The index of the timer in _indexToTimer is the index in the DBM and should contain
// the zeroth timer.
// The array _indexToTimerPair should always be sorted.
// The index of the LPN should match where it is in the _lpnList, that is, if lpn is
// an LhpnFile object in _lpnList, then _lpnList[lpn.getLpnIndex()] == lpn.
/*
* Resource List :
* TODO : Create a list reference where the algorithms can be found that this class
* depends on.
*/
public static final int INFINITY = Integer.MAX_VALUE;
/* The lower and upper bounds of the times as well as the dbm. */
private int[][] _matrix;
/* Maps the index to the timer. The index is row/column of the DBM sub-matrix.
* Logically the zero timer is given index -1.
* */
//private int[] _indexToTimer;
private LPNTransitionPair[] _indexToTimerPair;
/* The hash code. */
private int _hashCode;
/* A lexicon between a transitions index and its name. */
//private static HashMap<Integer, Transition> _indexToTransition;
/* Set if a failure in the testSplit method has fired already. */
//private static boolean _FAILURE = false;
/* Hack to pass a parameter to the equals method though a variable */
//private boolean subsetting = false;
/* Stores the continuous variables that have rate zero */
// HashMap<LPNTransitionPair, Variable> _rateZeroContinuous;
//DualHashMap<RangeAndPairing, Variable> _rateZeroContinuous;
DualHashMap<LPNTransitionPair, VariableRangePair> _rateZeroContinuous;
/* Records the largest zone that occurs. */
public static int ZoneSize = 0;
private void checkZoneMaxSize(){
if(dbmSize() > ZoneSize){
ZoneSize = dbmSize();
}
}
private LhpnFile[] _lpnList;
/*
* Turns on and off subsets for the zones.
* True means subset will be considered.
* False means subsets will not be considered.
*/
private static boolean _subsetFlag = true;
/*
* Turns on and off supersets for zones.
* True means that supersets will be considered.
* False means that supersets will not be considered.
*/
private static boolean _supersetFlag = true;
/**
* Gets the value of the subset flag.
* @return
* True if subsets are requested, false otherwise.
*/
public static boolean getSubsetFlag(){
return _subsetFlag;
}
/**
* Sets the value of the subset flag.
* @param useSubsets
 * The value for the subset flag. Set to true if
 * subsets are to be considered, false otherwise.
*/
public static void setSubsetFlag(boolean useSubsets){
_subsetFlag = useSubsets;
}
/**
* Gets the value of the superset flag.
* @return
* True if supersets are to be considered, false otherwise.
*/
public static boolean getSupersetFlag(){
return _supersetFlag;
}
/**
* Sets the superset flag.
* @param useSupersets
* The value of the superset flag. Set to true if
* supersets are to be considered, false otherwise.
*/
public static void setSupersetFlag(boolean useSupersets){
_supersetFlag = useSupersets;
}
/**
* Construct a zone that has the given timers.
* @param timers
* The ith index of the array is the index of the timer. For example,
* if timers = [1, 3, 5], then the zeroth row/column of the DBM is the
* timer of the transition with index 1, the first row/column of the
* DBM is the timer of the transition with index 3, and the 2nd
* row/column is the timer of the transition with index 5. Do not
* include the zero timer.
* @param matrix
* The DBM augmented with the lower and upper bounds of the delays for the
* transitions. For example, suppose a zone has timers [1, 3, 5] (as
* described in the timers parameters). The delay for transition 1 is
* [1, 3], the delay for transition 3 is [2,5], and the delay for
* transition 5 is [4,6]. Also suppose the DBM is
* t0 t1 t3 t5
* t0 | 0, 3, 3, 3 |
* t1 | 0, 0, 0, 0 |
* t3 | 0, 0, 0, 0 |
* t5 | 0, 0, 0, 0 |
* Then the matrix that should be passed is
* lb t0 t1 t3 t5
* ub| 0, 0, 3, 5, 6|
* t0| 0, 0, 3, 3, 3|
* t1|-1, 0, 0, 0, 0|
* t3|-2, 0, 0, 0, 0|
* t5|-4, 0, 0, 0, 0|
* The matrix should be non-null and the zero timer should always be the
* first timer, even when there are no other timers.
*/
public Zone(int[] timers, int[][] matrix)
{
// A negative number indicates that the hash code has not been set.
_hashCode = -1;
// Make a copy to reorder the timers.
// _indexToTimer = Arrays.copyOf(timers, timers.length);
// Make a copy to reorder the timers.
_indexToTimerPair = new LPNTransitionPair[timers.length];
for(int i=0; i<timers.length; i++){
_indexToTimerPair[i] = new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN,
timers[i], true);
}
// Sorting the array.
// Arrays.sort(_indexToTimer);
// Sorting the array.
Arrays.sort(_indexToTimerPair);
//if(_indexToTimer[0] != 0)
// if(_indexToTimer[0] != -1)
// // Add the zeroth timer.
// int[] newIndexToTimer = new int[_indexToTimer.length+1];
// for(int i=0; i<_indexToTimer.length; i++)
// newIndexToTimer[i+1] = _indexToTimer[i];
// _indexToTimer = newIndexToTimer;
// _indexToTimer[0] = -1;
if(_indexToTimerPair[0].get_transitionIndex() != -1){
// Add the zeroth timer.
LPNTransitionPair[] newIndexToTimerPair =
new LPNTransitionPair[_indexToTimerPair.length + 1];
for(int i=0; i<_indexToTimerPair.length; i++){
newIndexToTimerPair[i+1] = _indexToTimerPair[i];
}
_indexToTimerPair = newIndexToTimerPair;
_indexToTimerPair[0] = new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN, -1, true);
}
// if(_indexToTimer[0] < 0)
// // Add a zero timer.
// else if(_indexToTimer[0] > 0)
// int[] newTimerIndex = new int[_indexToTimer.length+1];
// for(int i=0; i<_indexToTimer.length; i++)
// newTimerIndex[i+1] = _indexToTimer[i];
// Map the old index of the timer to the new index of the timer.
HashMap<Integer, Integer> newIndex = new HashMap<Integer, Integer>();
// For the old index, find the new index.
for(int i=0; i<timers.length; i++)
{
// Since the zeroth timer is not included in the timers passed in,
// the index in the DBM is 1 more than the index of the timer
// in the timers array.
//newIndex.put(i+1, Arrays.binarySearch(_indexToTimer, timers[i]));
LPNTransitionPair searchValue =
new LPNTransitionPair(LPNTransitionPair.SINGLE_LPN, timers[i], true);
newIndex.put(i+1, Arrays.binarySearch(_indexToTimerPair, searchValue));
}
// Add the zero timer index.
newIndex.put(0, 0);
// Initialize the matrix.
_matrix = new int[matrixSize()][matrixSize()];
// Copy the DBM
for(int i=0; i<dbmSize(); i++)
{
for(int j=0; j<dbmSize(); j++)
{
// Copy the passed in matrix to _matrix.
setDbmEntry(newIndex.get(i), newIndex.get(j),
matrix[dbmIndexToMatrixIndex(i)][dbmIndexToMatrixIndex(j)]);
// In the above, changed setDBMIndex to setdbm
}
}
// Copy in the upper and lower bounds. The zero timer does not have an upper or lower bound
// so the index starts at i=1, the first non-zero timer.
for(int i=1; i< dbmSize(); i++)
{
setUpperBoundbydbmIndex(newIndex.get(i), matrix[0][dbmIndexToMatrixIndex(i)]);
// Note : The method setLowerBoundbydbmIndex, takes the value of the lower bound
// and the matrix stores the negative of the lower bound. So the matrix value
// must be multiplied by -1.
setLowerBoundbydbmIndex(newIndex.get(i), -1*matrix[dbmIndexToMatrixIndex(i)][0]);
}
recononicalize();
}
/**
 * Initializes a zone according to the markings of a state. The zone is
 * initialized as if all enabled timers have just been enabled.
 * @param initialState
 * The initial state used to determine the enabled timers.
*/
public Zone(State initialState)
{
// Extract the associated LPN.
LhpnFile lpn = initialState.getLpn();
int LPNIndex = lpn.getLpnIndex();
if(_lpnList == null){
// If no LPN exists yet, create it and put lpn in it.
_lpnList = new LhpnFile[LPNIndex+1];
_lpnList[LPNIndex] = lpn;
}
else if(_lpnList.length <= LPNIndex){
// The list does not contain the lpn.
LhpnFile[] tmpList = _lpnList;
_lpnList = new LhpnFile[LPNIndex+1];
_lpnList[LPNIndex] = lpn;
// Copy any that exist already.
for(int i=0; i<_lpnList.length; i++){
_lpnList[i] = tmpList[i];
}
}
else if(_lpnList[LPNIndex] != lpn){
// This checks that the appropriate lpn is in the right spot.
// If not (which gets you in this block), then this fixes it.
_lpnList[LPNIndex] = lpn;
}
// Default value for the hash code indicating that the hash code has not
// been set yet.
_hashCode = -1;
// Get the list of currently enabled Transitions by their index.
boolean[] enabledTran = initialState.getTranVector();
ArrayList<LPNTransitionPair> enabledTransitionsArrayList =
new ArrayList<LPNTransitionPair>();
LPNTransitionPair zeroPair = new LPNTransitionPair(LPNTransitionPair.ZERO_TIMER, -1, true);
// Add the zero timer first.
enabledTransitionsArrayList.add(zeroPair);
// The index of the boolean value corresponds to the index of the Transition.
for(int i=0; i<enabledTran.length; i++){
if(enabledTran[i]){
enabledTransitionsArrayList.add(new LPNTransitionPair(LPNIndex, i, true));
}
}
_indexToTimerPair = enabledTransitionsArrayList.toArray(new LPNTransitionPair[0]);
_matrix = new int[matrixSize()][matrixSize()];
for(int i=1; i<dbmSize(); i++)
{
// Get the name for the timer in the i-th column/row of DBM
String tranName =
lpn.getTransition(_indexToTimerPair[i].get_transitionIndex()).getName();
ExprTree delay = lpn.getDelayTree(tranName);
// Get the values of the variables for evaluating the ExprTree.
HashMap<String, String> varValues =
lpn.getAllVarsWithValuesAsString(initialState.getVector());
// Set the upper and lower bound.
int upper, lower;
if(delay.getOp().equals("uniform"))
{
ExprTree lowerDelay = delay.getLeftChild();
ExprTree upperDelay = delay.getRightChild();
lower = (int) lowerDelay.evaluateExpr(varValues);
upper = (int) upperDelay.evaluateExpr(varValues);
}
else
{
lower = (int) delay.evaluateExpr(varValues);
upper = lower;
}
setLowerBoundbydbmIndex(i, lower);
setUpperBoundbydbmIndex(i, upper);
}
// Advance the time and tighten the bounds.
advance();
recononicalize();
checkZoneMaxSize();
}
/**
* Creates a Zone based on the local states.
* @param localStates
* The current state (or initial) of the LPNs.
*/
public Zone(State[] localStates){
// Extract the local states.
//State[] localStates = tps.toStateArray();
// Initialize hash code to -1 (indicating nothing cached).
_hashCode = -1;
// Initialize the LPN list.
initialize_lpnList(localStates);
// Get the enabled transitions. This initializes the _indexTotimerPair
// which stores the relevant information.
// This method will also initialize the _rateZeroContinuous
initialize_indexToTimerPair(localStates);
// Initialize the matrix.
_matrix = new int[matrixSize()][matrixSize()];
// Set the lower bound/ upper bounds of the timers and the rates.
initializeLowerUpperBounds(getAllNames(), localStates);
// Initialize the row and column entries for the continuous variables.
initializeRowColumnContVar();
// Advance Time
advance();
// Re-canonicalize
recononicalize();
// Check the size of the DBM.
checkZoneMaxSize();
}
/**
* Gives the names of all the transitions and continuous variables that
* are represented by the zone.
* @return
* The names of the transitions and continuous variables that are
* represented by the zone.
*/
public String[] getAllNames(){
// String[] transitionNames = new String[_indexToTimerPair.length];
// transitionNames[0] = "The zero timer.";
// for(int i=1; i<transitionNames.length; i++){
// LPNTransitionPair ltPair = _indexToTimerPair[i];
// transitionNames[i] = _lpnList[ltPair.get_lpnIndex()]
// .getTransition(ltPair.get_transitionIndex()).getName();
// return transitionNames;
// Get the continuous variable names.
String[] contVar = getContVarNames();
// Get the transition names.
String[] trans = getTranNames();
// Create an array large enough for all the names.
String[] names = new String[contVar.length + trans.length + 1];
// Add the zero timer.
names[0] = "The zero timer.";
// Add the continuous variables.
for(int i=0; i<contVar.length; i++){
names[i+1] = contVar[i];
}
// Add the timers.
for(int i=0; i<trans.length; i++){
// Already the zero timer has been added and the elements of contVar.
// That's a total of 'contVar.length + 1' elements. The last index was
// thus 'contVar.length' So the first index to add to is
// 'contVar.length +1'.
names[1+contVar.length + i] = trans[i];
}
return names;
}
/**
* Get the names of the continuous variables that this zone uses.
* @return
* The names of the continuous variables that are part of this zone.
*/
public String[] getContVarNames(){
// List for accumulating the names.
ArrayList<String> contNames = new ArrayList<String>();
// Find the pairs that represent the continuous variables. Loop starts at
// i=1 since the i=0 is the zero timer.
for(int i=1; i<_indexToTimerPair.length; i++){
LPNTransitionPair ltPair = _indexToTimerPair[i];
// If the isTimer value is false, then this pair represents a continuous
// variable.
if(!ltPair.get_isTimer()){
// Get the LPN that this pairing references and find the name of
// the continuous variable whose index is given by this pairing.
contNames.add(_lpnList[ltPair.get_lpnIndex()]
.getContVarName(ltPair.get_transitionIndex()));
}
}
return contNames.toArray(new String[0]);
}
/**
* Gets the names of the transitions that are associated with the timers in the
* zone. Does not return the zero timer.
* @return
* The names of the transitions whose timers are in the zone except the zero
* timer.
*/
public String[] getTranNames(){
// List for accumulating the names.
ArrayList<String> transitionNames = new ArrayList<String>();
// Find the pairs that represent the transition timers.
for(int i=1; i<_indexToTimerPair.length; i++){
LPNTransitionPair ltPair = _indexToTimerPair[i];
// If the isTimer value is true, then this pair represents a timer.
if(ltPair.get_isTimer()){
// Get the LPN that this pairing references and find the name of the
// transition whose index is given by this pairing.
transitionNames.add(_lpnList[ltPair.get_lpnIndex()]
.getTransition(ltPair.get_transitionIndex()).getName());
}
}
return transitionNames.toArray(new String[0]);
}
/**
* Initializes the _lpnList using information from the local states.
* @param localStates
* The local states.
*/
private void initialize_lpnList(State[] localStates){
// Create the LPN list.
_lpnList = new LhpnFile[localStates.length];
// Get the LPNs.
for(int i=0; i<localStates.length; i++){
_lpnList[i] = localStates[i].getLpn();
}
}
/**
 * Initializes the _indexToTimerPair from the local states. This method also
 * initializes the rate zero variables.
 * @param localStates
 * The local states.
*/
private void initialize_indexToTimerPair(State[] localStates){
/*
* The populating of the _indexToTimerPair is done in three stages.
* The first is to add the zero timer which is at the beginning of the zone.
* The second is to add the continuous variables. And the third is to add
* the other timers. Since the continuous variables are added before the
* timers and the variables and timers are added in the order of the LPNs,
* the elements in an accumulating list (enabledTransitionsArrayList) are
* already in order up to the elements added for a particular LPN. Thus the
* only sorting that needs to take place is the sorting for a particular LPN.
 * Correspondingly, elements are first found for an LPN and sorted, then added
* to the main list.
*/
// This method will also initialize the _rateZeroContinuous
//_rateZeroContinuous = new DualHashMap<RangeAndPairing, Variable>();
_rateZeroContinuous =
new DualHashMap<LPNTransitionPair, VariableRangePair>();
// This list accumulates the transition pairs (ie timers) and the continuous
// variables.
ArrayList<LPNTransitionPair> enabledTransitionsArrayList =
new ArrayList<LPNTransitionPair>();
// Put in the zero timer.
enabledTransitionsArrayList
.add(new LPNTransitionPair(LPNTransitionPair.ZERO_TIMER, -1, true));
// Get the continuous variables.
for(int i=0; i<localStates.length; i++){
// Accumulates the changing continuous variables for a single LPN.
ArrayList<LPNTransitionPair> singleLPN =
new ArrayList<LPNTransitionPair>();
// Get the associated LPN.
LhpnFile lpn = localStates[i].getLpn();
// Get the continuous variables for this LPN.
String[] continuousVariables = lpn.getContVars();
// Get the variable, index map.
DualHashMap<String, Integer> variableIndex = lpn.getContinuousIndexMap();
// Find which have a nonzero rate.
for(int j=0; j<continuousVariables.length; j++){
// Get the Variables with this name.
Variable contVar = lpn.getVariable(continuousVariables[j]);
// Get the rate.
//int rate = (int) Double.parseDouble(contVar.getInitRate());
IntervalPair rate = parseRate(contVar.getInitRate());
// Get the LPN index for the variable
int lpnIndex = lpn.getLpnIndex();
// Get the index as a variable for the LPN.
int contVariableIndex = variableIndex.get(continuousVariables[j]);
LPNTransitionPair newPair =
new LPNTransitionPair(lpnIndex, contVariableIndex, false);
// If the rate is non-zero, then the variables needs to be tracked
// by matrix part of the Zone.
//if(rate !=0){
if(!rate.equals(new IntervalPair(0,0))){
// Temporary exception guaranteeing only unit rates.
//if(rate != -1 && rate != 1){
if(rate.get_LowerBound() != 1 && rate.get_UpperBound() != 1){
throw new IllegalArgumentException("Current development " +
"only supports positive unit rates. The variable " + contVar +
" has a rate of " + rate);
}
// // Get the LPN index for the variable
// int lpnIndex = lpn.getLpnIndex();
// // Get the index as a variable for the LPN. This index matches
// // the index in the vector stored by platu.State.
// int contVariableIndex = variableIndex.get(continuousVariables[j]);
// The continuous variable reference.
// singleLPN.add(
// new LPNTransitionPair(lpnIndex, contVariableIndex, false));
singleLPN.add(newPair);
}
else{
// If the rate is zero, then the Zone keeps track of this variable
// in a list.
// _rateZeroContinuous.put(newPair, cpontVar);
// _rateZeroContinuous.
// put(new RangeAndPairing(newPair, parseRate(contVar.getInitValue())),
// contVar);
_rateZeroContinuous.
put(newPair, new VariableRangePair(contVar,
parseRate(contVar.getInitValue())));
}
}
// Sort the list.
Collections.sort(singleLPN);
// Add the list to the total accumulating list.
for(int j=0; j<singleLPN.size(); j++){
enabledTransitionsArrayList.add(singleLPN.get(j));
}
}
// Get the transitions.
for(int i=0; i<localStates.length; i++){
// Extract the enabled transition vector.
boolean[] enabledTran = localStates[i].getTranVector();
// Accumulates the transition pairs for one LPN.
ArrayList<LPNTransitionPair> singleLPN = new ArrayList<LPNTransitionPair>();
// The index of the boolean value corresponds to the index of the Transition.
for(int j=0; j<enabledTran.length; j++){
if(enabledTran[j]){
// Add the transition pair.
singleLPN.add(new LPNTransitionPair(i, j, true));
}
}
// Sort the transitions for the current LPN.
Collections.sort(singleLPN);
// Add the collection to the enabledTransitionsArrayList
for(int j=0; j<singleLPN.size(); j++){
enabledTransitionsArrayList.add(singleLPN.get(j));
}
}
// Extract out the array portion of the enabledTransitionsArrayList.
_indexToTimerPair = enabledTransitionsArrayList.toArray(new LPNTransitionPair[0]);
}
/**
* Sets the lower and upper bounds for the transitions and continuous variables.
 * @param varNames
 * The names of the transitions in _indexToTimerPair.
 * @param localStates
 * The current local states of the LPNs.
*/
private void initializeLowerUpperBounds(String[] varNames, State[] localStates){
// Traverse the entire length of the DBM sub-matrix except the zero row/column.
// This is the same length as the _indexToTimerPair.length-1. The DBM is used to
// match the idea of setting the value for each row.
for(int i=1; i<dbmSize(); i++){
// Get the current LPN and transition pairing.
LPNTransitionPair ltPair = _indexToTimerPair[i];
//int upper, lower;
IntervalPair range;
if(!ltPair.get_isTimer()){
// If the pairing represents a continuous variable, then the
// upper and lower bound are the initial value or infinity depending
// on whether the initial rate is positive or negative.
// If the value is a constant, then assign the upper and lower bounds
// to be constant. If the value is a range then assign the upper and
// lower bounds to be a range.
Variable v = _lpnList[ltPair.get_lpnIndex()]
.getContVar(ltPair.get_transitionIndex());
// int initialRate = (int) Double.parseDouble(v.getInitRate());
// upper = initialRate;
// lower = initialRate;
String rate = v.getInitRate();
// Parse the rate. Should be in the form of [x,y] where x
// and y are integers.
//IntervalPair range = parseRate(rate);
range = parseRate(rate);
// Set the upper and lower bound (in the matrix) for the
// continuous variables.
// TODO : Check if correct.
String contValue = v.getInitValue();
IntervalPair bound = parseRate(contValue);
// Set upper bound (DBM entry (0, x) where x is the index of the variable v).
setDbmEntryByPair(LPNTransitionPair.ZERO_TIMER_PAIR, ltPair, bound.get_UpperBound());
// Set lower bound (DBM entry (x, 0) where x is the index of the variable v).
setDbmEntryByPair(ltPair, LPNTransitionPair.ZERO_TIMER_PAIR, -1*bound.get_LowerBound());
// lower = range.get_LowerBound();
// upper = range.get_UpperBound();
}
else{
// Get the expression tree.
ExprTree delay = _lpnList[ltPair.get_lpnIndex()].getDelayTree(varNames[i]);
// Get the values of the variables for evaluating the ExprTree.
HashMap<String, String> varValues =
_lpnList[ltPair.get_lpnIndex()]
.getAllVarsWithValuesAsString(localStates[ltPair.get_lpnIndex()].getVector());
// Set the upper and lower bound.
// Passing the zone as null since it should not be needed.
// if(delay.getOp().equals("uniform")){
// IntervalPair lowerRange = delay.getLeftChild()
// .evaluateExprBound(varValues, null);
// IntervalPair upperRange = delay.getRightChild()
// .evaluateExprBound(varValues, null);
// // The lower and upper bounds should evaluate to a single
// // value. Yell if they don't.
// if(!lowerRange.singleValue() || !upperRange.singleValue()){
// "the lower or the upper bound evaluated to a range " +
// "instead of a single value.");
// range = new IntervalPair(lowerRange.get_LowerBound(),
// upperRange.get_UpperBound());
// else{
// range = delay.evaluateExprBound(varValues, null);
range = delay.evaluateExprBound(varValues, this);
// int upper, lower;
// if(delay.getOp().equals("uniform"))
// ExprTree lowerDelay = delay.getLeftChild();
// ExprTree upperDelay = delay.getRightChild();
// lower = (int) lowerDelay.evaluateExpr(varValues);
// upper = (int) upperDelay.evaluateExpr(varValues);
// else
// lower = (int) delay.evaluateExpr(varValues);
// upper = lower;
}
// setLowerBoundbydbmIndex(i, lower);
// setUpperBoundbydbmIndex(i, upper);
setLowerBoundbydbmIndex(i, range.get_LowerBound());
setUpperBoundbydbmIndex(i, range.get_UpperBound());
}
}
/**
* Initialize the rows and columns for the continuous variables.
*/
private void initializeRowColumnContVar(){
/*
* TODO : Describe the idea behind the following algorithm.
*/
// for(int row=2; row<_indexToTimerPair.length; row++){
// // Note: row is indexing the row of the DBM matrix.
// LPNTransitionPair ltRowPair = _indexToTimerPair[row];
// if(ltRowPair.get_isTimer()){
// // If we reached the timers, stop.
// break;
// for(int col=1; col<row; col++){
// // Note: col is indexing the column of the DBM matrix.
// // The new (row, col) entry. The entry is given by col-row<= m_(row,col). Since
// // col <= m_(0,col) (its upper bound) and -row <= m_(row,0) (the negative of its lower
// // bound), the entry is given by col-row <= m(0,col) + m_(row,0) = m_(row,col);
// int rowCol = getDbmEntry(row,0) + getDbmEntry(0, col);
// // The new (col, row) entry.
// int colRow = getDbmEntry(col, 0) + getDbmEntry(0, row);
// setDbmEntry(row, col, rowCol);
// setDbmEntry(col, row, colRow);
// The only entries that do not need to be checked are the ones where both variables
// represent timers.
for(int row=2; row<_indexToTimerPair.length; row++){
// Note: row is indexing the row of the DBM matrix.
LPNTransitionPair ltRowPair = _indexToTimerPair[row];
// if(ltRowPair.get_isTimer()){
// // If we reached the timers, stop.
// break;
for(int col=1; col<row; col++){
// Note: col is indexing the column of the DBM matrix.
LPNTransitionPair ltColPair = _indexToTimerPair[col];
// If we've reached the part of the zone involving only timers, then break out
// of this row.
if(ltRowPair.get_isTimer() && ltColPair.get_isTimer()){
break;
}
// The new (row, col) entry. The entry is given by col-row<= m_(row,col). Since
// col <= m_(0,col) (its upper bound) and -row <= m_(row,0) (the negative of its lower
// bound), the entry is given by col-row <= m(0,col) + m_(row,0) = m_(row,col);
int rowCol = getDbmEntry(row,0) + getDbmEntry(0, col);
// The new (col, row) entry.
int colRow = getDbmEntry(col, 0) + getDbmEntry(0, row);
setDbmEntry(row, col, rowCol);
setDbmEntry(col, row, colRow);
}
}
}
/**
 * Zero argument constructor for use in methods that create Zones where the member
* variables will be set by the method.
*/
private Zone()
{
_matrix = new int[0][0];
_indexToTimerPair = new LPNTransitionPair[0];
_hashCode = -1;
_lpnList = new LhpnFile[0];
_rateZeroContinuous = new DualHashMap<LPNTransitionPair, VariableRangePair>();
}
/**
* Gets the upper bound of a Transition from the zone.
* @param t
* The transition whose upper bound is wanted.
* @return
* The upper bound of Transition t.
*/
public int getUpperBoundbyTransition(Transition t)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
LPNTransitionPair ltPair =
new LPNTransitionPair(lpnIndex, transitionIndex, true);
return getUpperBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair, ltPair));
}
/**
* Returns the upper bound of the continuous variable.
 * @param contVar
 * The name of the continuous variable of interest.
 * @param lpn
 * The LhpnFile object that contains the variable.
 * @return
 * The upper bound of contVar.
*/
public int getUpperBoundbyContinuousVariable(String contVar, LhpnFile lpn){
// TODO : Finish.
// // Determine whether the variable is in the zone or rate zero.
// RangeAndPairing indexAndRange = _rateZeroContinuous.getKey(var);
// // If a RangeAndPairing is returned, then get the information from here.
// if(indexAndRange != null){
// return indexAndRange.get_range().get_UpperBound();
// // If indexAndRange is null, then try to get the value from the zone.
// int i=-1;
// for(i=0; i<_indexToTimerPair.length; i++){
// if(_indexToTimerPair[i].equals(var)){
// break;
// if(i < 0){
// + "a non-rate zero continuous variable that was not found in the "
// + "zone.");
// return getUpperBoundbydbmIndex(i);
// Extract the necessary indices.
int lpnIndex = lpn.getLpnIndex();
//int contVarIndex = lpn.get
DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
int contIndex = variableIndecies.get(contVar);
// Package the indices with false indicating not a timer.
LPNTransitionPair index = new LPNTransitionPair(lpnIndex, contIndex, false);
//Search for the continuous variable in the rate zero variables.
VariableRangePair pairing = _rateZeroContinuous.get(index);
// If Pairing is not null, the variable was found and return the result.
if(pairing != null){
return pairing.get_range().get_UpperBound();
}
// If Pairing was null, the variable was not found. Search for the variable
// in the zone portion.
int i = Arrays.binarySearch(_indexToTimerPair, index);
// If i < 0, the search was unsuccessful, so throw an exception.
if(i < 0){
throw new IllegalArgumentException("Atempted to find the lower bound for "
+ "a non-rate zero continuous variable that was not found in the "
+ "zone.");
}
return getUpperBoundbydbmIndex(i);
}
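/**
* Gets the upper bound of the rate for a continuous variable.
* Note: currently a stub that always returns 0 (see the TODO in the body).
* @param contVar
* The index pairing identifying the continuous variable.
* @return
* The upper bound of the variable's rate.
*/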
public int getUpperBoundForRate(LPNTransitionPair contVar){
// TODO : finish. Also note that for non-zero rate continuous
// variables, the method getUpperBoundbyContinuousVariable does this.
return 0;
}
/**
* Get the value of the upper bound for the delay.
* @param index
* The timer's row/column of the DBM matrix.
* @return
* The upper bound on the transition's delay.
*/
public int getUpperBoundbydbmIndex(int index)
{
return _matrix[0][dbmIndexToMatrixIndex(index)];
}
/**
* Set the value of the upper bound for the delay.
* @param t
* The transition whose upper bound is being set.
* @param value
* The value of the upper bound.
*/
public void setUpperBoundbyTransition(Transition t, int value)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
setUpperBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair, ltPair), value);
}
/**
* Set the value of the upper bound for the delay.
* @param index
* The timer's row/column of the DBM matrix.
* @param value
* The value of the upper bound.
*/
public void setUpperBoundbydbmIndex(int index, int value)
{
_matrix[0][dbmIndexToMatrixIndex(index)] = value;
}
/**
* Sets the upper bound for a transition described by an LPNTransitionPair.
* @param ltPair
* The index of the transition and the index of the associated LPN for
* the timer to set the upper bound.
* @param value
* The value for setting the upper bound.
*/
private void setUpperBoundByLPNTransitionPair(LPNTransitionPair ltPair, int value){
setUpperBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair, ltPair), value);
}
/**
* Gets the lower bound of a Transition from the zone.
* @param t
* The transition whose lower bound is wanted.
* @return
* The lower bound of Transition t.
*/
public int getLowerBoundbyTransition(Transition t)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
return -1*getLowerBoundbydbmIndex(
Arrays.binarySearch(_indexToTimerPair, ltPair));
}
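/**
* Returns the lower bound of the continuous variable.
* @param contVar
* The continuous variable of interest.
* @param lpn
* The LhpnFile object that contains the variable.
* @return
* The lower bound of contVar.
*/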
public int getLowerBoundbyContinuousVariable(String contVar, LhpnFile lpn){
// Extract the necessary indices.
int lpnIndex = lpn.getLpnIndex();
//int contVarIndex = lpn.get
DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
int contIndex = variableIndecies.get(contVar);
// Package the indices with false indicating not a timer.
LPNTransitionPair index = new LPNTransitionPair(lpnIndex, contIndex, false);
//Search for the continuous variable in the rate zero variables.
VariableRangePair pairing = _rateZeroContinuous.get(index);
// If Pairing is not null, the variable was found and return the result.
if(pairing != null){
return pairing.get_range().get_LowerBound();
}
// If Pairing was null, the variable was not found. Search for the variable
// in the zone portion.
int i = Arrays.binarySearch(_indexToTimerPair, index);
// If i < 0, the search was unsuccessful, so throw an exception.
if(i < 0){
throw new IllegalArgumentException("Atempted to find the lower bound for "
+ "a non-rate zero continuous variable that was not found in the "
+ "zone.");
}
return getLowerBoundbydbmIndex(i);
}
/**
* Get the value of the lower bound for the delay.
* @param index
* The timer's row/column of the DBM matrix.
* @return
* The value of the lower bound.
*/
public int getLowerBoundbydbmIndex(int index)
{
return _matrix[dbmIndexToMatrixIndex(index)][0];
}
/**
* Set the value of the lower bound for the delay.
* @param t
* The transition whose lower bound is being set.
* @param value
* The value of the lower bound.
*/
public void setLowerBoundbyTransition(Transition t, int value)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
setLowerBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair,ltPair), value);
}
/**
* Sets the lower bound for a transition described by an LPNTransitionPair.
* @param ltPair
* The index of the transition and the index of the associated LPN for
* the timer to set the lower bound.
* @param value
* The value for setting the lower bound.
*/
private void setLowerBoundByLPNTransitionPair(LPNTransitionPair ltPair, int value){
setLowerBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair,ltPair), value);
}
/**
* Set the value of the lower bound for the delay.
* @param index
* The timer's row/column of the DBM matrix.
* @param value
* The value of the lower bound.
*/
public void setLowerBoundbydbmIndex(int index, int value)
{
_matrix[dbmIndexToMatrixIndex(index)][0] = -1*value;
}
/**
* Give the upper and lower bounds for a continuous variable.
* @param contVar
* The variable of interest.
* @param lpn
* The LhpnFile object that contains the variable.
* @return
* The upper and lower bounds according to the Zone.
*/
public IntervalPair getContinuousBounds(String contVar, LhpnFile lpn){
/*
* Need to determine whether this is supposed to be a rate zero variable or a non-zero
* rate variable. One method is to check the rate of the passed variable. The other is
* to just check if the variable is present in either place.
*/
// Extract the necessary indices.
int lpnIndex = lpn.getLpnIndex();
// Get the index of the continuous variable.
DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
int contIndex = variableIndecies.get(contVar);
// Package the indices with false indicating not a timer.
LPNTransitionPair index = new LPNTransitionPair(lpnIndex, contIndex, false);
// Search for the continuous variable in the rate zero variables.
VariableRangePair pairing = _rateZeroContinuous.get(index);
// If Pairing is not null, the variable was found and return the result.
if(pairing != null){
return pairing.get_range();
}
// If Pairing was null, the variable was not found. Search for the variable
// in the zone portion.
int i = Arrays.binarySearch(_indexToTimerPair, index);
// If i < 0, the search was unsuccessful, so throw an exception.
if(i < 0){
throw new IllegalArgumentException("Atempted to find the bounds for "
+ "a non-rate zero continuous variable that was not found in the "
+ "zone.");
}
// Else find the upper and lower bounds.
int lower = getLowerBoundbydbmIndex(i);
int upper = getUpperBoundbydbmIndex(i);
return new IntervalPair(lower, upper);
}
/**
* Sets the bounds for a continuous variable.
* @param contVar
* The continuous variable to set the bounds on.
* @param lpn
* The LhpnFile object that contains the variable.
* @param range
* The new range of the continuous variable.
*/
public void setContinuousBounds(String contVar, LhpnFile lpn,
IntervalPair range){
// Extract the necessary indices.
int lpnIndex = lpn.getLpnIndex();
// Get the index of the continuous variable.
DualHashMap<String, Integer> variableIndecies = lpn.getContinuousIndexMap();
int contIndex = variableIndecies.get(contVar);
// Package the indices with false indicating not a timer.
LPNTransitionPair index = new LPNTransitionPair(lpnIndex, contIndex, false);
// Search for the continuous variable in the rate zero variables.
VariableRangePair pairing = _rateZeroContinuous.get(index);
// If Pairing is not null, the variable was found and make the new assignment.
if(pairing != null){
pairing.set_range(range);
return;
}
// If Pairing was null, the variable was not found. Search for the variable
// in the zone portion.
int i = Arrays.binarySearch(_indexToTimerPair, index);
// If i < 0, the search was unsuccessful, so throw an exception.
if(i < 0){
throw new IllegalArgumentException("Atempted to find the bounds for "
+ "a non-rate zero continuous variable that was not found in the "
+ "zone.");
}
// Else find the upper and lower bounds.
setLowerBoundbydbmIndex(i, range.get_LowerBound());
setUpperBoundbydbmIndex(i, range.get_UpperBound());
}
/**
* Converts the index of the DBM to the index of _matrix.
* @param i
* The row/column index of the DBM.
* @return
* The row/column index of _matrix.
*/
private int dbmIndexToMatrixIndex(int i)
{
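// The DBM is embedded in _matrix offset by one: row/column 0 of _matrix holds
// the upper/lower bounds, so DBM index i maps to _matrix index i+1.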
return i+1;
}
/**
* Retrieves an entry of the DBM using the DBM's addressing.
* @param i
* The row of the DBM.
* @param j
* The column of the DBM.
* @return
* The value of the (i, j) element of the DBM.
*/
public int getDbmEntry(int i, int j)
{
return _matrix[dbmIndexToMatrixIndex(i)][dbmIndexToMatrixIndex(j)];
}
/**
* Sets an entry of the DBM using the DBM's addressing.
* @param i
* The row of the DBM.
* @param j
* The column of the DBM.
* @param value
* The new value for the entry.
*/
private void setDbmEntry(int i, int j, int value)
{
_matrix[dbmIndexToMatrixIndex(i)][dbmIndexToMatrixIndex(j)] = value;
}
/**
* Sets the entry in the DBM using the LPNTransitionPair indexing.
* @param row
* The LPNTransitionPair for the row.
* @param col
* The LPNTransitionPair for the column.
* @param value
* The value to set the entry to.
*/
private void setDbmEntryByPair(LPNTransitionPair row, LPNTransitionPair col, int value){
// The row index.
int i = timerIndexToDBMIndex(row);
// The column index.
int j = timerIndexToDBMIndex(col);
setDbmEntry(i, j, value);
}
/**
* Returns the index of the transition in the DBM given a LPNTransitionPair pairing
* the transition index and associated LPN index.
* @param ltPair
* The pairing comprising the index of the transition and the index of the associated
* LPN.
* @return
* The row/column of the DBM associated with the ltPair.
*/
private int timerIndexToDBMIndex(LPNTransitionPair ltPair)
{
return Arrays.binarySearch(_indexToTimerPair, ltPair);
}
/**
* Creates a string representation of the Zone. Timers are labeled with 'ti', where i is
* the transition index associated with the timer.
*/
public String toString()
{
// TODO : Add the rate zero continuous variables.
String result = "Timer and delay.\n";
int count = 0;
// Print the timers.
for(int i=1; i<_indexToTimerPair.length; i++, count++)
{
if(_lpnList.length == 0)
{
// If no LPN's are associated with this Zone, use the index of the timer.
result += " t" + _indexToTimerPair[i].get_transitionIndex() + " : ";
}
else
{
String name;
// If the current LPNTransitionPair is a timer, get the name
// from the transitions.
if(_indexToTimerPair[i].get_isTimer()){
// Get the name of the transition.
Transition tran = _lpnList[_indexToTimerPair[i].get_lpnIndex()].
getTransition(_indexToTimerPair[i].get_transitionIndex());
name = tran.getName();
}
else{
// If the current LPNTransitionPair is not a timer, get the
// name as a continuous variable.
Variable var = _lpnList[_indexToTimerPair[i].get_lpnIndex()]
.getContVar(_indexToTimerPair[i].get_transitionIndex());
name = var.getName() + "rate";
}
// result += " " + tran.getName() + ":";
result += " " + name + ":";
}
result += "[ " + -1*getLowerBoundbydbmIndex(i) + ", " + getUpperBoundbydbmIndex(i) + " ]";
if(count > 9)
{
result += "\n";
count = 0;
}
}
result += "\nDBM\n";
// Print the DBM.
for(int i=0; i<_indexToTimerPair.length; i++)
{
result += "| " + getDbmEntry(i, 0);
for(int j=1; j<_indexToTimerPair.length; j++)
{
result += ", " + getDbmEntry(i, j);
}
result += " |\n";
}
return result;
}
/**
* Tests for equality. Overrides inherited equals method.
* @return True if o is equal to this object, false otherwise.
*/
public boolean equals(Object o)
{
// Check if the reference is null.
if(o == null)
{
return false;
}
// Check that the type is correct.
if(!(o instanceof Zone))
{
return false;
}
// Check for equality using the Zone equality.
return equals((Zone) o);
}
/**
* Tests for equality.
* @param otherZone
* The Zone to compare.
* @return
* True if the zones are non-null and equal, false otherwise.
*/
public boolean equals(Zone otherZone)
{
// Check if the reference is null first.
if(otherZone == null)
{
return false;
}
// Check for reference equality.
if(this == otherZone)
{
return true;
}
// If the hash codes are different, then the objects are not equal.
if(this.hashCode() != otherZone.hashCode())
{
return false;
}
// Check if the they have the same number of timers.
if(this._indexToTimerPair.length != otherZone._indexToTimerPair.length){
return false;
}
// Check if the timers are the same.
for(int i=0; i<this._indexToTimerPair.length; i++){
if(!(this._indexToTimerPair[i].equals(otherZone._indexToTimerPair[i]))){
return false;
}
}
// Check if the matrix is the same
for(int i=0; i<_matrix.length; i++)
{
for(int j=0; j<_matrix[0].length; j++)
{
if(!(this._matrix[i][j] == otherZone._matrix[i][j]))
{
return false;
}
}
}
return true;
}
/**
* Determines if this zone is a subset of Zone otherZone.
* @param otherZone
* The zone to compare against.
* @return
* True if this is a subset of other; false otherwise.
*/
public boolean subset(Zone otherZone){
// Check if the reference is null first.
if(otherZone == null)
{
return false;
}
// Check for reference equality.
if(this == otherZone)
{
return true;
}
// Check if the the same number of timers are present.
if(this._indexToTimerPair.length != otherZone._indexToTimerPair.length){
return false;
}
// Check if the transitions are the same.
for(int i=0; i<this._indexToTimerPair.length; i++){
if(!(this._indexToTimerPair[i].equals(otherZone._indexToTimerPair[i]))){
return false;
}
}
// Check if the entries of this Zone are less than or equal to the entries
// of the other Zone.
for(int i=0; i<_matrix.length; i++)
{
for(int j=0; j<_matrix[0].length; j++)
{
if(!(this._matrix[i][j] <= otherZone._matrix[i][j])){
return false;
}
}
}
return true;
}
/**
* Determines if this zone is a superset of Zone otherZone.
* @param otherZone
* The zone to compare against.
* @return
* True if this is a superset of otherZone; false otherwise. More specifically it
* gives the result of otherZone.subset(this). Thus it agrees with the subset method.
*/
public boolean superset(Zone otherZone){
return otherZone.subset(this);
}
/**
* Overrides the hashCode.
*/
public int hashCode()
{
// Check if the hash code has been set.
if(_hashCode <0)
{
_hashCode = createHashCode();
}
return _hashCode;
}
/**
* Creates a hash code for a Zone object.
* @return
* The hash code.
*/
private int createHashCode()
{
int newHashCode = Arrays.hashCode(_indexToTimerPair);
for(int i=0; i<_matrix.length; i++)
{
newHashCode ^= Arrays.hashCode(_matrix[i]);
}
return Math.abs(newHashCode);
}
/**
* The size of the DBM sub matrix. This is calculated using the size of _indexToTimerPair.
* @return
* The size of the DBM.
*/
private int dbmSize()
{
return _indexToTimerPair.length;
}
/**
* The size of the matrix.
* @return
* The size of the matrix. This is calculated using the size of _indexToTimerPair.
*/
private int matrixSize()
{
return _indexToTimerPair.length + 1;
}
/**
* Performs Floyd's all-pairs shortest path algorithm to put the DBM into canonical form.
*/
private void recononicalize()
{
for(int k=0; k<dbmSize(); k++)
{
for (int i=0; i<dbmSize(); i++)
{
for(int j=0; j<dbmSize(); j++)
{
if(getDbmEntry(i, k) != INFINITY && getDbmEntry(k, j) != INFINITY
&& getDbmEntry(i, j) > getDbmEntry(i, k) + getDbmEntry(k, j))
{
setDbmEntry(i, j, getDbmEntry(i, k) + getDbmEntry(k, j));
}
if( (i==j) && getDbmEntry(i, j) != 0)
{
throw new DiagonalNonZeroException("Entry (" + i + ", " + j + ")" +
" became " + getDbmEntry(i, j) + ".");
}
}
}
}
}
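/*
 * The update above is the standard triangle-inequality tightening. For example, if
 * getDbmEntry(i, k) == 3 (i.e. k - i <= 3) and getDbmEntry(k, j) == 2 (i.e. j - k <= 2),
 * then j - i <= 5, so any (i, j) entry larger than 5 is tightened to 5. A non-zero
 * diagonal entry after tightening indicates an inconsistency, hence the
 * DiagonalNonZeroException check.
 */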
/**
* Determines if a timer associated with a given transition has reached its lower bound.
* @param t
* The transition to consider.
* @return
* True if the timer has reached its lower bound, false otherwise.
*/
public boolean exceedsLowerBoundbyTransitionIndex(Transition t)
{
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
return exceedsLowerBoundbydbmIndex(Arrays.binarySearch(_indexToTimerPair, ltPair));
}
/**
* Determines if a timer has reached its lower bound.
* @param index
* The timer's index.
* @return
* True if the timer has reached its lower bound, false otherwise.
*/
public boolean exceedsLowerBoundbydbmIndex(int index)
{
// Note : Make sure that the lower bound is stored as a negative number
// and that the inequality is correct.
return _matrix[0][dbmIndexToMatrixIndex(index)] <=
_matrix[1][dbmIndexToMatrixIndex(index)];
}
/* (non-Javadoc)
* @see verification.timed_state_exploration.zone.Zone#fireTransitionbyTransitionIndex(int, int[], verification.platu.stategraph.State)
*/
// public Zone fireTransitionbyTransitionIndex(int timer, int[] enabledTimers,
// State state)
// // TODO: Check if finish.
// int index = Arrays.binarySearch(_indexToTimer, timer);
// //return fireTransitionbydbmIndex(Arrays.binarySearch(_indexToTimer, timer),
// //enabledTimers, state);
// // Check if the value is in this zone to fire.
// if(index < 0){
// return this;
// return fireTransitionbydbmIndex(index, enabledTimers, state);
/**
* Gives the Zone obtained by firing a given Transition.
* @param t
* The transition being fired.
* @param enabledTran
* The list of currently enabled Transitions.
* @param localStates
* The current local states.
* @return
* The Zone obtained by firing Transition t with the enabled Transitions
* enabledTran when the current state is localStates.
*/
public Zone fire(Transition t, LpnTranList enabledTran, State[] localStates){
// Create the LPNTransitionPair to check if the Transitions is in the zone and to
// find the index.
LhpnFile lpn = t.getLpn();
int lpnIndex = lpn.getLpnIndex();
int transitionIndex = t.getIndex();
LPNTransitionPair ltPair = new LPNTransitionPair(lpnIndex, transitionIndex, true);
int dbmIndex = Arrays.binarySearch(_indexToTimerPair, ltPair);
if(dbmIndex <= 0){
return this;
}
// Get the new zone portion.
Zone newZone = fireTransitionbydbmIndex(dbmIndex, enabledTran, localStates);
// Update any assigned continuous variables.
newZone.updateContinuousAssignment(t, localStates[lpnIndex]);
//return fireTransitionbydbmIndex(dbmIndex, enabledTran, localStates);
return newZone;
}
/**
* Updates the Zone according to the transition firing.
* @param index
* The index of the timer.
* @return
* The updated Zone.
*/
public Zone fireTransitionbydbmIndex(int index, LpnTranList enabledTimers,
State[] localStates)
{
Zone newZone = new Zone();
// Copy the LPNs over.
newZone._lpnList = new LhpnFile[this._lpnList.length];
for(int i=0; i<this._lpnList.length; i++){
newZone._lpnList[i] = this._lpnList[i];
}
// Copy the continuous variables over.
newZone._rateZeroContinuous = this._rateZeroContinuous.clone();
// Extract the pairing information for the enabled timers.
// Using the enabledTimersList should be faster than calling the get method
// several times.
newZone._indexToTimerPair = new LPNTransitionPair[enabledTimers.size() + 1];
int count = 0;
newZone._indexToTimerPair[count++] =
new LPNTransitionPair(LPNTransitionPair.ZERO_TIMER, -1, true);
for(Transition t : enabledTimers){
newZone._indexToTimerPair[count++] =
new LPNTransitionPair(t.getLpn().getLpnIndex(), t.getIndex(), true);
}
Arrays.sort(newZone._indexToTimerPair);
HashSet<LPNTransitionPair> newTimers = new HashSet<LPNTransitionPair>();
HashSet<LPNTransitionPair> oldTimers = new HashSet<LPNTransitionPair>();
for(int i=0; i<newZone._indexToTimerPair.length; i++)
{
// Determine if each value is a new timer or old.
if(Arrays.binarySearch(this._indexToTimerPair, newZone._indexToTimerPair[i])
>= 0 )
{
// The timer was already present in the zone.
oldTimers.add(newZone._indexToTimerPair[i]);
}
else
{
// The timer is a new timer.
newTimers.add(newZone._indexToTimerPair[i]);
}
}
// Create the new matrix.
newZone._matrix = new int[newZone.matrixSize()][newZone.matrixSize()];
// TODO: For simplicity, make a copy of the current zone and perform the
// restriction and re-canonicalization. Later add a copy re-canonicalization
// that does the steps together.
Zone tempZone = this.clone();
tempZone.restrict(index);
tempZone.recononicalize();
// Copy the tempZone to the new zone.
for(int i=0; i<tempZone.dbmSize(); i++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[i]))
{
continue;
}
// Get the new index for the timer.
int newIndexi = i==0 ? 0 :
Arrays.binarySearch(newZone._indexToTimerPair, tempZone._indexToTimerPair[i]);
for(int j=0; j<tempZone.dbmSize(); j++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[j]))
{
continue;
}
int newIndexj = j==0 ? 0 :
Arrays.binarySearch(newZone._indexToTimerPair, tempZone._indexToTimerPair[j]);
newZone._matrix[newZone.dbmIndexToMatrixIndex(newIndexi)]
[newZone.dbmIndexToMatrixIndex(newIndexj)]
= tempZone.getDbmEntry(i, j);
}
}
// Copy the upper and lower bounds.
for(int i=1; i<tempZone.dbmSize(); i++)
{
if(!oldTimers.contains(tempZone._indexToTimerPair[i]))
{
continue;
}
newZone.setLowerBoundByLPNTransitionPair(tempZone._indexToTimerPair[i],
-1*tempZone.getLowerBoundbydbmIndex(i));
// The minus sign is because _matrix stores the negative of the lower bound.
newZone.setUpperBoundByLPNTransitionPair(tempZone._indexToTimerPair[i],
tempZone.getUpperBoundbydbmIndex(i));
}
// Copy in the new relations for the new timers.
for(LPNTransitionPair timerNew : newTimers)
{
for(LPNTransitionPair timerOld : oldTimers)
{
newZone.setDbmEntry(newZone.timerIndexToDBMIndex(timerNew),
newZone.timerIndexToDBMIndex(timerOld),
tempZone.getDbmEntry(0, tempZone.timerIndexToDBMIndex(timerOld)));
// int newTimeIndex = newZone.timerIndexToDBMIndex(timerNew);
// int oldTimeIndex = newZone.timerIndexToDBMIndex(timerOld);
// int value = tempZone.getDbmEntry(0, oldTimeIndex);
newZone.setDbmEntry(newZone.timerIndexToDBMIndex(timerOld),
newZone.timerIndexToDBMIndex(timerNew),
tempZone.getDbmEntry(tempZone.timerIndexToDBMIndex(timerOld), 0));
}
}
// Set the upper and lower bounds for the new timers.
for(LPNTransitionPair pair : newTimers){
// Get all the upper and lower bounds for the new timers.
// Get the name for the timer in the i-th column/row of DBM
//String tranName = indexToTran.get(i).getName();
String tranName = _lpnList[pair.get_lpnIndex()]
.getTransition(pair.get_transitionIndex()).getName();
ExprTree delay = _lpnList[pair.get_lpnIndex()].getDelayTree(tranName);
// Get the values of the variables for evaluating the ExprTree.
HashMap<String, String> varValues =
_lpnList[pair.get_lpnIndex()]
.getAllVarsWithValuesAsString(localStates[pair.get_lpnIndex()].getVector());
// Set the upper and lower bound.
int upper, lower;
if(delay.getOp().equals("uniform"))
{
// ExprTree lowerDelay = delay.getLeftChild();
// ExprTree upperDelay = delay.getRightChild();
// lower = (int) lowerDelay.evaluateExpr(varValues);
// upper = (int) upperDelay.evaluateExpr(varValues);
IntervalPair lowerRange = delay.getLeftChild()
.evaluateExprBound(varValues, null);
IntervalPair upperRange = delay.getRightChild()
.evaluateExprBound(varValues, null);
// The lower and upper bounds should evaluate to a single
// value. Throw an exception if they don't.
if(!lowerRange.singleValue() || !upperRange.singleValue()){
throw new IllegalStateException("When evaulating the delay, " +
"the lower or the upper bound evaluated to a range " +
"instead of a single value.");
}
lower = lowerRange.get_LowerBound();
upper = upperRange.get_UpperBound();
}
else
{
// lower = (int) delay.evaluateExpr(varValues);
// upper = lower;
IntervalPair range = delay.evaluateExprBound(varValues, this);
lower = range.get_LowerBound();
upper = range.get_UpperBound();
}
newZone.setLowerBoundByLPNTransitionPair(pair, lower);
newZone.setUpperBoundByLPNTransitionPair(pair, upper);
}
newZone.advance();
newZone.recononicalize();
newZone.checkZoneMaxSize();
return newZone;
}
/**
* Advances time.
*/
private void advance()
{
for(int i=0; i<dbmSize(); i++)
{
_matrix[dbmIndexToMatrixIndex(0)][dbmIndexToMatrixIndex(i)] =
getUpperBoundbydbmIndex(i);
}
}
/**
* Finds the maximum amount that time can advance.
* @param contVar
* The continuous variable under consideration.
* @param localStates
* The current local states.
* @return
* The maximum amount that time can advance before a timer expires or an inequality
* changes value.
*/
private int maxAdvance(LPNTransitionPair contVar, State[] localStates){
/*
* Several comments in this function may look like C code. That's because,
* well it is C code from atacs/src/lhpnrsg.c.
*/
// Get the continuous variable in question.
int lpnIndex = contVar.get_lpnIndex();
int varIndex = contVar.get_transitionIndex();
Variable variable = _lpnList[lpnIndex].getContVar(varIndex);
// int lhpnCheckPreds(int p,ineqList &ineqL,lhpnStateADT s,ruleADT **rules,
// int nevents,eventADT *events)
//#ifdef __LHPN_TRACE__
//printf("lhpnCheckPreds:begin()\n");
//#endif
//int min = INFIN;
//int newMin = INFIN;
int min = INFINITY;
int newMin = INFINITY;
//int zoneP = getIndexZ(s->z,-2,p);
//for(unsigned i=0;i<ineqL.size();i++) {
// if(ineqL[i]->type > 4) {
// continue;
//#ifdef __LHPN_PRED_DEBUG__
// printf("Zone to check...\n");
// printZ(s->z,events,nevents,s->r);
// printf("Checking ...");
// printI(ineqL[i],events);
// printf("\n");
//#endif
// if(ineqL[i]->place == p) {
// Get all the inequalities that reference the variable of interest.
ArrayList<InequalityVariable> inequalities = variable.getInequalities();
for(InequalityVariable ineq : inequalities){
// ineq_update(ineqL[i],s,nevents);
// Update the inequality variable.
int ineqValue = ineq.evaluate(localStates[varIndex], this);
// if(ineqL[i]->type <= 1) {
// /* Working on a > or >= ineq */
if(ineq.get_op().equals(">") || ineq.get_op().equals(">=")){
// Working on a > or >= ineq
// if(s->r->bound[p-nevents].current > 0) {
// If the rate is positive.
if(getUpperBoundForRate(contVar) > 0){
// if(s->m->state[ineqL[i]->signal]=='1') {
if(ineqValue != 0){
// if(s->z->matrix[zoneP][0] <
// chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 1a\n");
//#endif
//#ifdef __LHPN_WARN__
// warn("checkPreds: Impossible case 1.\n");
//#endif
// newMin = s->z->matrix[zoneP][0];
if(getDbmEntry(0, contVar.get_transitionIndex())
< ineq.getConstant()){
// CP: case 1a.
newMin = getDbmEntry(0, contVar.get_transitionIndex());
}
// else if((-1)*s->z->matrix[0][zoneP] >
// chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 2a\n");
//#endif
// newMin = chkDiv(events[p]->urange,
// s->r->bound[p-nevents].current,'F');
else if ((-1)*getDbmEntry(contVar.get_transitionIndex(),0)
> ineq.getConstant()){
// CP : case 2a
newMin = 0;
}
// else {
// /* straddle case */
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 3a\n");
//#endif
// newMin = chkDiv(events[p]->urange,
// s->r->bound[p-nevents].current,'F');
else{
// Straddle case
// CP : case 3a
newMin = 0;
}
}
else{
// else {
// if(s->z->matrix[zoneP][0] <
// chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 4a -- min: %d\n",chkDiv(ineqL[i]->constant,s->r->bound[p-nevents].current,'F'));
//#endif
// newMin = chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F');
if(getDbmEntry(contVar.get_transitionIndex(), 0)
< ineq.getConstant()){
// CP: case 4a -- min
newMin = ineq.getConstant();
}
// else if((-1)*s->z->matrix[0][zoneP] >
// chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 5a\n");
//#endif
//#ifdef __LHPN_WARN__
// warn("checkPreds: Impossible case 3.\n");
//#endif
// newMin = s->z->matrix[zoneP][0];
else if((-1)*getDbmEntry(contVar.get_transitionIndex(),0)
< ineq.getConstant()){
// Impossible case 3.
newMin = getDbmEntry(contVar.get_transitionIndex(),0);
}
// else {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 6a -- min: %d\n",s->z->matrix[zoneP][0]);
//#endif
// /* straddle case */
// newMin = s->z->matrix[zoneP][0];
else{
// CP : case 6a
// straddle case
newMin = getDbmEntry(contVar.get_transitionIndex(),0);
}
}
}
// else {
// /* warp <= 0 */
else{
// warp <= 0.
// if(s->m->state[ineqL[i]->signal]=='1') {
if( ineqValue != 1){
// if(s->z->matrix[0][zoneP] <
// (-1)*chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 7a\n");
//#endif
//#ifdef __LHPN_WARN__
// warn("checkPreds: Impossible case 2.\n");
//#endif
// newMin = s->z->matrix[zoneP][0];
if(getDbmEntry(0, contVar.get_transitionIndex())
< (-1) * ineq.getConstant()){
// CP: case 7a.
newMin = getDbmEntry(contVar.get_transitionIndex(),0);
}
// else if((-1)*s->z->matrix[zoneP][0] >
// (-1)*chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 8a\n");
//#endif
// newMin = chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F');
else if((-1)*getDbmEntry(0, contVar.get_transitionIndex())
< (-1)*ineq.getConstant()){
// Impossible case 8a.
newMin = ineq.getConstant();
}
// else {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 9a\n");
//#endif
// /* straddle case */
// newMin = s->z->matrix[zoneP][0];
else{
// straddle case
newMin = getDbmEntry(0, contVar.get_transitionIndex());
}
}
// else {
else{
// if(s->z->matrix[0][zoneP] <
// (-1)*chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 10a\n");
//#endif
// newMin = chkDiv(events[p]->lrange,
// s->r->bound[p-nevents].current,'F');
if(getDbmEntry(contVar.get_transitionIndex(),0)
< (-1) * ineq.getConstant()){
// CP: case 10a.
newMin = 0;
}
// else if((-1)*s->z->matrix[zoneP][0] >
// (-1)*chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 11a\n");
// printf("z=%d c=%d b=%d\n",
// s->z->matrix[zoneP][0],
// ineqL[i]->constant,
// s->r->bound[p-nevents].current);
//#endif
//#ifdef __LHPN_WARN__
// warn("checkPreds: Impossible case 4.\n");
//#endif
// newMin = s->z->matrix[zoneP][0];
else if((-1)*getDbmEntry(0, contVar.get_transitionIndex())
< (-1) * ineq.getConstant()){
// CP: case 7a.
newMin = getDbmEntry(0, contVar.get_transitionIndex());
}
// else {
// /* straddle case */
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 12a\n");
//#endif
// newMin = chkDiv(events[p]->lrange,
// s->r->bound[p-nevents].current,'F');
else{
// straddle case
newMin = 0;
}
}
}
}
// else {
// /* Working on a < or <= ineq */
else{
// Working on a < or <= ineq
// if(s->r->bound[p-nevents].current > 0) {
if(getUpperBoundForRate(contVar) > 0){
// if(s->m->state[ineqL[i]->signal]=='1') {
if(ineqValue != 0){
// if(s->z->matrix[zoneP][0] <
// chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 1b -- min: %d\n",chkDiv(ineqL[i]->constant,s->r->bound[p-nevents].current,'F'));
//#endif
// newMin = chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F');
if(getDbmEntry(0, contVar.get_transitionIndex())
< (-1) * ineq.getConstant()){
// CP: case 1b -- min.
newMin = ineq.getConstant();
}
// else if((-1)*s->z->matrix[0][zoneP] >
// chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 2b\n");
//#endif
//#ifdef __LHPN_WARN__
// warn("checkPreds: Impossible case 5.\n");
//#endif
// newMin = chkDiv(events[p]->urange,
// s->r->bound[p-nevents].current,'F');
if((-1)*getDbmEntry(contVar.get_transitionIndex(), 0)
< ineq.getConstant()){
// CP: case 2b.
newMin = 0;
}
// else {
// /* straddle case */
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 3b -- min: %d\n",s->z->matrix[zoneP][0]);
//#endif
// newMin = s->z->matrix[zoneP][0];
else{
//straddle case
newMin = getDbmEntry(0,contVar.get_transitionIndex());
}
}
// else {
else{
// if(s->z->matrix[zoneP][0] <
// chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 4b\n");
//#endif
//#ifdef __LHPN_WARN__
// warn("checkPreds: Impossible case 7.\n");
//#endif
// newMin = s->z->matrix[zoneP][0];
if(getDbmEntry(0, contVar.get_transitionIndex())
< ineq.getConstant()){
// CP: case 4b.
newMin = getDbmEntry(0, contVar.get_transitionIndex());
}
// else if((-1)*s->z->matrix[0][zoneP] >
// chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 5b\n");
//#endif
// newMin = chkDiv(events[p]->urange,
// s->r->bound[p-nevents].current,'F');
else if((-1)*getDbmEntry(contVar.get_transitionIndex(), 0)
< ineq.getConstant()){
// CP: case 5b.
newMin = 0;
}
// else {
// /* straddle case */
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 6b\n");
//#endif
// newMin = chkDiv(events[p]->urange,
// s->r->bound[p-nevents].current,'F');
else{
// straddle case
// CP : case 6b
newMin = 0;
}
}
}
// else {
// /* warp <= 0 */
else {
// warp <=0
// if(s->m->state[ineqL[i]->signal]=='1') {
if(ineqValue != 0){
// if(s->z->matrix[0][zoneP] <
// (-1)*chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 7b\n");
//#endif
// newMin = chkDiv(events[p]->lrange,
// s->r->bound[p-nevents].current,'F');
if(getDbmEntry(contVar.get_transitionIndex(), 0)
< ineq.getConstant()){
// CP: case 7b.
newMin = getDbmEntry(0, contVar.get_transitionIndex());
}
// else if((-1)*s->z->matrix[zoneP][0] >
// (-1)*chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 8b\n");
//#endif
//#ifdef __LHPN_WARN__
// warn("checkPreds: Impossible case 8.\n");
//#endif
// newMin = s->z->matrix[zoneP][0];
// else {
// /* straddle case */
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 9b\n");
//#endif
// newMin = chkDiv(events[p]->lrange,
// s->r->bound[p-nevents].current,'F');
}
// else {
else {
// if(s->z->matrix[0][zoneP] <
// chkDiv((-1)*ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 10b\n");
// printf("zone: %d const: %d warp: %d chkDiv: %d\n",s->z->matrix[0][zoneP],ineqL[i]->constant,s->r->bound[p-nevents].current,chkDiv((-1)*ineqL[i]->constant,s->r->bound[p-nevents].current,'F'));
//#endif
//#ifdef __LHPN_WARN__
// warn("checkPreds: Impossible case 6.\n");
//#endif
// newMin = s->z->matrix[zoneP][0];
// else if((-1)*s->z->matrix[zoneP][0] >
// (-1)*chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F')) {
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 11b\n");
//#endif
// newMin = chkDiv(ineqL[i]->constant,
// s->r->bound[p-nevents].current,'F');
// else {
// /* straddle case */
//#ifdef __LHPN_PRED_DEBUG__
// printf("CP:case 12b\n");
//#endif
// newMin = s->z->matrix[zoneP][0];
// if(newMin < min) {
// min = newMin;
}
}
}
// Check if the value can be lowered.
if(newMin < min){
min = newMin;
}
}
//#ifdef __LHPN_PRED_DEBUG__
//printf("Min leaving checkPreds for %s: %d\n",events[p]->event,min);
//#endif
//return min;
return min;
}
/* (non-Javadoc)
* @see java.lang.Object#clone()
*/
public Zone clone()
{
// TODO: Check if finished.
Zone clonedZone = new Zone();
clonedZone._matrix = new int[this.matrixSize()][this.matrixSize()];
for(int i=0; i<this.matrixSize(); i++)
{
for(int j=0; j<this.matrixSize(); j++)
{
clonedZone._matrix[i][j] = this._matrix[i][j];
}
}
clonedZone._indexToTimerPair = Arrays.copyOf(_indexToTimerPair, _indexToTimerPair.length);
clonedZone._hashCode = this._hashCode;
clonedZone._lpnList = Arrays.copyOf(this._lpnList, this._lpnList.length);
clonedZone._rateZeroContinuous = this._rateZeroContinuous.clone();
return clonedZone;
}
/**
* Restricts the lower bound of a timer.
*
* @param timer
* The timer whose lower bound is to be tightened.
*/
private void restrict(int timer)
{
//int dbmIndex = Arrays.binarySearch(_indexToTimer, timer);
_matrix[dbmIndexToMatrixIndex(timer)][dbmIndexToMatrixIndex(0)]
= getLowerBoundbydbmIndex(timer);
}
/**
* The list of enabled transitions.
* @return
* The list of all transitions whose timers have reached their lower bounds.
*/
public List<Transition> getEnabledTransitions()
{
ArrayList<Transition> enabledTransitions = new ArrayList<Transition>();
// Check if the timer exceeds its lower bound starting with the first nonzero
// timer.
for(int i=1; i<_indexToTimerPair.length; i++)
{
if(getDbmEntry(0, i) >= -1 * getLowerBoundbydbmIndex(i))
{
enabledTransitions.add(_lpnList[_indexToTimerPair[i].get_lpnIndex()]
.getTransition(_indexToTimerPair[i].get_transitionIndex()));
}
}
return enabledTransitions;
}
/**
* Gives the list of enabled transitions associated with a particular LPN.
* @param LpnIndex
* The Index of the LPN the Transitions are a part of.
* @return
* A List of the Transitions that are enabled in the LPN given by the index.
*/
public List<Transition> getEnabledTransitions(int LpnIndex){
ArrayList<Transition> enabledTransitions = new ArrayList<Transition>();
// Check if the timer exceeds its lower bound starting with the first nonzero
// timer.
for(int i=1; i<_indexToTimerPair.length; i++)
{
if(getDbmEntry(0, i) >= -1 * getLowerBoundbydbmIndex(i))
{
LPNTransitionPair ltPair = _indexToTimerPair[i];
if( ltPair.get_lpnIndex() == LpnIndex){
enabledTransitions.add(_lpnList[ltPair.get_lpnIndex()]
.getTransition(ltPair.get_transitionIndex()));
}
}
}
return enabledTransitions;
}
/**
* Updates the continuous variables that are set by firing a transition.
* @param firedTran
* The transition that fired.
* @param s
* The current (local) state.
*/
public void updateContinuousAssignment(Transition firedTran, State s){
// Get the LPN.
LhpnFile lpn = _lpnList[firedTran.getLpn().getLpnIndex()];
// Get the current values of the (local) state.
HashMap<String,String> currentValues =
lpn.getAllVarsWithValuesAsString(s.getVector());
// Get all the continuous variable assignments.
HashMap<String, ExprTree> assignTrees = firedTran.getContAssignTrees();
for(String contVar : assignTrees.keySet()){
// Get the bounds to assign the continuous variables.
IntervalPair assignment =
assignTrees.get(contVar).evaluateExprBound(currentValues, this);
// Make the assignment.
setContinuousBounds(contVar, lpn, assignment);
}
}
/* (non-Javadoc)
* @see verification.timed_state_exploration.zone.Zone#getLexicon()
*/
// public HashMap<Integer, Transition> getLexicon(){
// if(_indexToTransition == null){
// return null;
// return new HashMap<Integer, Transition>(_indexToTransition);
// public void setLexicon(HashMap<Integer, Transition> lexicon){
// _indexToTransition = lexicon;
/**
* Gives an array that maps the index of a timer in the DBM to the timer's index.
* @return
* The array that maps the index of a timer in the DBM to the timer's index.
*/
// public int[] getIndexToTimer(){
// return Arrays.copyOf(_indexToTimerPair, _indexToTimerPair.length);
/**
* Calculates a warping value needed to warp a Zone. When a zone is being warped the form
* r1*z2 - r1*z1 + r2*z1 becomes important in finding the new values of the zone.
*
* @param z1
* Upper bound or negative lower bound.
* @param z2
* Relative value.
* @param r1
* First ratio.
* @param r2
* Second ratio.
* @return
* r1*z2 - r1*z1 + r2*z1
*/
public int warp(int z1, int z2, int r1, int r2){
/*
* See "Verification of Analog/Mixed-Signal Circuits Using Labeled Hybrid Petri Nets"
* by S. Little, D. Walter, C. Myers, R. Thacker, S. Batchu, and T. Yoneda
* Section III.C for details on how this function is used and where it comes
* from.
*/
return r1*z2 - r1*z1 + r2*z1;
}
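/*
 * For example, with z1 = 4, z2 = 10, r1 = 2 and r2 = 3, warp returns
 * 2*10 - 2*4 + 3*4 = 24.
 */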
/**
* Warps a Zone.
* @return
* The warped Zone.
*/
public Zone dmbWarp(){
/*
* See "Verification of Analog/Mixed-Signal Circuits Using Labeled Hybrid Petri Nets"
* by S. Little, D. Walter, C. Myers, R. Thacker, S. Batchu, and T. Yoneda
* Section III.C for details on how this function is used and where it comes
* from.
*/
return null;
}
/**
* The DiagonalNonZeroException extends java.lang.RuntimeException.
* The intention is for this exception to be thrown if a Zone has a non-zero
* entry appear on the diagonal.
*
* @author Andrew N. Fisher
*
*/
public class DiagonalNonZeroException extends java.lang.RuntimeException
{
/**
* Generated serialVersionUID.
*/
private static final long serialVersionUID = -3857736741611605411L;
/**
* Creates a DiagonalNonZeroException.
* @param Message
* The message to be displayed when the exception is thrown.
*/
public DiagonalNonZeroException(String Message)
{
super(Message);
}
}
/**
* This exception is thrown when trying to merge two zones whose corresponding timers
* do not agree.
* @author Andrew N. Fisher
*
*/
// public class IncompatibleZoneException extends java.lang.RuntimeException
// // TODO : Check if this class can be removed.
// /**
// * Generated serialVersionUID
// */
// private static final long serialVersionUID = -2453680267411313227L;
// public IncompatibleZoneException(String Message)
// super(Message);
/**
* Clears out the lexicon.
*/
// public static void clearLexicon(){
// _indexToTransition = null;
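/**
* Parses a rate string into an IntervalPair. A plain integer such as "5" yields the
* degenerate interval [5,5]. When a comma is present, the first and last characters are
* stripped, so the input is assumed to be a delimited pair such as "[2,7]", which yields
* the interval [2,7]. Anything else causes a NumberFormatException.
* @param rate
* The rate string to parse.
* @return
* The parsed interval.
*/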
private IntervalPair parseRate(String rate){
String rateNoSpaces = rate.trim();
// First check if the string is a single number.
// Integer i = Integer.parseInt(rate);
// if(i != null){
// // The string is a number, so set the upper and lower bounds equal.
// return new IntervalPair(i,i);
// First check for a comma (representing an interval input).
int commaIndex = rateNoSpaces.indexOf(",");
if(commaIndex < 0){
// Assume that the string is a constant. A NumberFormatException
// will be thrown otherwise.
int i = Integer.parseInt(rate);
return new IntervalPair(i,i);
}
String lowerString = rateNoSpaces.substring(1, commaIndex).trim();
String upperString = rateNoSpaces.substring(commaIndex+1,
rateNoSpaces.length()-1).trim();
return new IntervalPair(Integer.parseInt(lowerString),
Integer.parseInt(upperString));
}
}
|
package org.zeroxlab.game;
import android.view.MotionEvent;
import android.os.SystemClock;
import com.stickycoding.rokon.Scene;
import com.stickycoding.rokon.Sprite;
import com.stickycoding.rokon.background.FixedBackground;
import com.stickycoding.rokon.TextureAtlas;
import com.stickycoding.rokon.Texture;
import com.stickycoding.rokon.Drawable;
import com.stickycoding.rokon.Rokon;
import android.game.tetris.ITetrisConstants;
import android.game.tetris.TetrisGame;
public class GameScene extends Scene implements TetrisGame.GameCallback, ITetrisConstants {
private FixedBackground background;
private Sprite mBtnRight;
private Sprite mBtnLeft;
private Sprite mBtnDown;
private Sprite mBtnRotate;
private Sprite mBorder;
private Texture backgroundTexture;
private Texture cellTexture;
private Texture btnRight;
private Texture btnLeft;
private Texture btnDown;
private Texture btnRotate;
private Texture borderNormal;
private Texture borderPuke;
private static float sSceneWidth = 480f;
private static float sSceneHeight = 320f;
private static boolean[] mCells = new boolean[PLAYFIELD_COLS * PLAYFIELD_ROWS];
private boolean mPuking = false;
private static long mPukeTime;
private static long mNow;
final private static long sPukePeriod = 600;
private Board mBoard;
private TetrisGame mGame;
public GameScene() {
super(2, 10);
TextureAtlas atlas = new TextureAtlas();
backgroundTexture = new Texture("background.png");
cellTexture = new Texture("cell.png");
btnRight = new Texture("btn_right.png");
btnLeft = new Texture("btn_left.png");
btnDown = new Texture("btn_down.png");
btnRotate = new Texture("btn_rotate.png");
borderNormal = new Texture("border_normal.png");
borderPuke = new Texture("border_puke.png");
atlas.insert(backgroundTexture);
atlas.insert(cellTexture);
atlas.insert(btnRight);
atlas.insert(btnLeft);
atlas.insert(btnDown);
atlas.insert(btnRotate);
atlas.insert(borderNormal);
atlas.insert(borderPuke);
atlas.complete();
setBackground(background = new FixedBackground(backgroundTexture));
float bX = sSceneWidth * 0.38f;
float bY = 0;
float bW = sSceneHeight * 0.64f;
float bH = sSceneHeight;
mBorder = new Sprite(bX, bY, bW, bH);
mBorder.setTexture(borderNormal);
add(1, mBorder);
bX = bX + bW * 0.065f;
bY = bY + bH * 0.28f;
bW = bW * 0.87f;
bH = bH * 0.66f;
mBoard = new Board(bX, bY, bW, bH, mCells);
mBoard.setTexture(cellTexture);
mBoard.show();
add(0, mBoard);
float btnSize = sSceneHeight * 0.2f;
mBtnLeft = new Sprite(sSceneWidth * 0.05f, sSceneHeight * 0.4f, btnSize, btnSize);
mBtnRight = new Sprite(sSceneWidth * 0.21f, sSceneHeight * 0.7f, btnSize, btnSize);
mBtnDown = new Sprite(sSceneWidth * 0.21f, sSceneHeight * 0.2f, btnSize * 0.8f, btnSize * 0.8f);
mBtnRotate = new Sprite(sSceneWidth * 0.9f, sSceneHeight * 0.5f, btnSize, btnSize);
mBtnLeft.setTexture(btnLeft);
mBtnRight.setTexture(btnRight);
mBtnDown.setTexture(btnDown);
mBtnRotate.setTexture(btnRotate);
mBtnLeft.setTouchable();
mBtnRight.setTouchable();
mBtnDown.setTouchable();
mBtnRotate.setTouchable();
add(1, mBtnLeft);
add(1, mBtnRight);
add(1, mBtnDown);
add(1, mBtnRotate);
mGame = new TetrisGame(Rokon.getActivity(), this);
}
@Override
public void onGameLoop() {
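// Revert the border to its normal texture once the "puke" highlight has been
// shown for sPukePeriod milliseconds, then advance the game by one round.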
if (mPuking) {
mNow = SystemClock.uptimeMillis();
if ((mNow - mPukeTime) > sPukePeriod) {
mPuking = false;
mBorder.setTexture(borderNormal);
}
}
mGame.runRound();
}
@Override
public void onTouchDown(Drawable object, float x, float y, MotionEvent event, int pointerCount, int pointerId) {
if (object == mBtnRotate) {
mGame.actionRotate();
} else if (object == mBtnRight){
mGame.actionRight();
} else if (object == mBtnLeft){
mGame.actionLeft();
} else if (object == mBtnDown) {
mGame.actionFall();
}
}
@Override
public void onTouchDown(float x, float y, MotionEvent event, int pointerCount, int pointerId) {
// This is called when you press down on the screen.
}
@Override
public void onTouchMove(float x, float y, MotionEvent event, int pointerCount, int pointerId) {
// This is called when you move your finger over the screen.
//(i.e. pretty much every frame if you're holding your finger down)
}
@Override
public void onTouchUp(float x, float y, MotionEvent event, int pointerCount, int pointerId) {
// And this is called when you stop pressing.
}
@Override
public void onPause() {
mGame.setGameFocus(false);
}
@Override
public void onResume() {
mGame.setGameFocus(true);
}
@Override
public void onReady() {
mGame.setGameFocus(true);
}
public void onGameOver() {
}
public void onRedraw() {
}
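// Called when a new shape appears: briefly switch the border to the "puke"
// texture; onGameLoop switches it back after sPukePeriod milliseconds.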
public void onShapeChanged(int current, int next) {
mPuking = true;
mPukeTime = SystemClock.uptimeMillis();
mBorder.setTexture(borderPuke);
}
public void onCellUpdated(boolean[] cells, int rows, int cols) {
for (int i = 0; i < cells.length; i++) {
mCells[i] = cells[i];
}
}
}
|
package stallone.datasequence.io;
import java.io.*;
import java.nio.*;
import java.util.Arrays;
import java.util.BitSet;
import static stallone.api.API.*;
import stallone.api.doubles.IDoubleArray;
import stallone.api.io.IReleasableFile;
import stallone.doubles.fastutils.LongArrayList;
import stallone.io.CachedRandomAccessFile;
public class XtcFile implements IReleasableFile
{
protected String filename;
/**
* The file we are reading from.
*/
protected CachedRandomAccessFile randomAccessFile;
/**
* Gromacs versions (="magic number") supported by this Java class,
* 1995_10=0x000007cb.
*/
protected final int[] supportedMagicNrs =
{
1995
};
// I don't know in detail what this array does, but it has an interesting structure:
// First 9 elements are 0 => if the system has <= 9 atoms the coordinates are not compressed ?!
// Every third element is 2^i, e.g. 2^3=8 (10th element), 2^4=16 (13th element), 2^5=32 (16th element) ...
// The elements in the array are correlated with the number of bits used for encoding the
// atom coordinates
protected final static int[] xtc_magicints =
{
0, 0, 0, 0, 0, 0, 0, 0, 0,
8, 10, 12, 16, 20, 25, 32, 40, 50, 64,
80, 101, 128, 161, 203, 256, 322, 406, 512, 645,
812, 1024, 1290, 1625, 2048, 2580, 3250, 4096, 5060, 6501,
8192, 10321, 13003, 16384, 20642, 26007, 32768, 41285, 52015, 65536,
82570, 104031, 131072, 165140, 208063, 262144, 330280, 416127, 524287, 660561,
832255, 1048576, 1321122, 1664510, 2097152, 2642245, 3329021, 4194304, 5284491, 6658042,
8388607, 10568983, 13316085, 16777216
};
/**
* Number of frames (from 1 to n), frameIndex goes from 0 to n-1.
*/
protected int numOfFrames;
/**
* Number of bytes in the file.
*/
protected long fileSize;
/**
* Stores the starting position of each frame in bytes.
*/
protected long[] framePos;
/**
* Stores status of all frames (false:= frame ok, true:= frame broken).
*/
protected BitSet frameBroken;
/**
* Store the number of the current frame (from 0 to numOfFrames-1), to avoid
* reading same frame data.
*/
protected int nrOfCurrentFrameHeader;
/**
* Store the nr. of the current frame (from 0 to numOfFrames-1), to avoid
* reading and decoding same frame data
*/
protected int nrOfCurrentFrameCoordinates;
protected int frameSize;
/**
* Size in bytes of the current frame header => the size (52 bytes: 4 magic number,
* 4 atom count, 4 step, 4 time, 36 box matrix) is fixed over trajectory frames.
*/
protected final int frameHeaderSize = 52;
/**
* Size randomAccessFile bytes of "coordiantes header".
*/
protected int coordinatesHeaderSize;
/**
* Size in bytes of the coordinates block ("coordinates header"
* and "coordinates") in the current frame of the trajectory => the size
* may change over trajectory frames.
*/
protected int coordinatesHeaderAndCoordinatesSize;
/**
* "Magic number" (=Gromacs version= 1995_10=0x000007cb ) read from input
* trajectory.
*/
protected int magicNrFileVersion;
/**
* Number of atoms in the current frame from the "frame header"
* (should not change over frames).
*/
protected int nrAtoms;
/**
* Simulation frame number in the trajectory (not the real frame
* number!), e.g. 0,100,200,....
*/
protected int step;
/**
* Floating point representation of the simulation time in the
* trajectory.
*/
protected float t;
/**
* 3x3 matrix , computational box which is stored as a set of three basis
* vectors, to allow for triclinic PBC. For a rectangular box the box edges
* are stored on the diagonal of the matrix.
*/
protected float[] box;
/**
* Number of atoms in the current frame from the "coordinates header" =>
* content redundant => ignore it!
*/
protected int nrAtomsCoordinatesHeader;
/**
* Scale factor to reduce coordinate precision from float to int (default
* 1000).
*/
protected float precision;
/**
* Scaled minimal x,y,z coordinate of an atom in the current frame.
*/
protected int[] minInt;
/**
* Scaled maximal x,y,z coordinate of an atom in the current frame.
*/
protected int[] maxInt;
/**
* Number of bits nb used to code atom coordinates.
*/
protected int amountBitsForCompressedCoordinates;
/**
* Length of the compressed coordinates stream in bytes.
*/
protected int compressedCoordinatesLengthInByte;
/**
* Datastructure to store compressed atom coordinates.
*/
protected int[] coordinatesCompressed;
/**
* Datastructure to store uncompressed atom coordinates.
*/
protected IDoubleArray coordinatesUncompressed;
protected int[] bytes = new int[32];
/**
* Will be used while reading this file frame by frame.
*/
protected ByteBuffer bb = null;
/**
* Indicates that the header of the currently set file has been scanned.
*/
protected boolean initialized = false;
/**
* Default constructor. The trajectory file must be supplied later via
* setSource(String) and scanned via scan().
*/
public XtcFile()
{
}
public XtcFile(String _filename) throws FileNotFoundException, IOException
{
this.filename = _filename;
this.init();
}
public void setSource(String _filename)
{
this.filename = _filename;
}
public void scan()
throws IOException
{
this.init();
}
/**
* Opens the input trajectory file and scans it, recording the starting
* position of every frame.
*/
private void init() throws FileNotFoundException, IOException
{
// read file header only once.
if (this.initialized)
return;
// read input file trajectory
this.randomAccessFile = new CachedRandomAccessFile(filename);
// for non cached access
// this.randomAccessFile2 = new RandomAccessFile( filename, "r" );
this.fileSize = randomAccessFile.length(); // get number of bytes in the file
// read main properties from the first frame of the trajectory
this.magicNrFileVersion = this.randomAccessFile.readInt(); // read "magic number" (=Gromacs version)
Arrays.sort(this.supportedMagicNrs); // Ensure array sorted
if (Arrays.binarySearch(this.supportedMagicNrs, this.magicNrFileVersion) == -1)
{
System.out.println("Warning: Wrong magic number, this Gromacs version is not supported!");
}
this.nrAtoms = this.randomAccessFile.readInt(); // read number of atoms in the current frame (should
// not change over frames)
this.step = this.randomAccessFile.readInt(); // simulation frame number in the trajectory (not the real
// number!), e.g. 0, 100, 200,....
this.t = this.randomAccessFile.readFloat(); // floating point representation of the simulation time in
// the trajectory
this.box = new float[]
{
this.randomAccessFile.readFloat(),
this.randomAccessFile.readFloat(),
this.randomAccessFile.readFloat(),
this.randomAccessFile.readFloat(),
this.randomAccessFile.readFloat(),
this.randomAccessFile.readFloat(),
this.randomAccessFile.readFloat(),
this.randomAccessFile.readFloat(),
this.randomAccessFile.readFloat()
};
// reset inputfile pointer to start position
this.randomAccessFile.seek(0);
LongArrayList tempFramePositions = new LongArrayList(50000); // tmp list to store the starting position of the
// frames in the input file (the length of a
// list can be increased, the length of an array
// cannot!)
int framesDetected = 0;
long pos = 0;
int magicRead; // frameHeader
int noOfAtomsRead; // frameHeader
int noOfAtomsRead2; // coordinates header
int sizeOfCoordinates; // coordinates header
/*
* double realExpectedTime = 0.0d; double expectedTime = 0.0d; double
* eps = 0.1d;
*/
do
{
pos = randomAccessFile.getFilePointer();
tempFramePositions.add(pos);
// System.out.println("Frame " + framesDetected + " at " + pos );
framesDetected++;
magicRead = randomAccessFile.readInt();
if (magicRead == this.magicNrFileVersion)
{
noOfAtomsRead = randomAccessFile.readInt();
// frameNoRead = randomAccessFile.readInt(); // 4
// simulationTime = randomAccessFile.readFloat();// 4 is simulation time
// 36 is 3x3 cell axis a 4bytes each
randomAccessFile.skipBytes(4 + 4 + 36);
// 52 bytes in total
/*
* //200 - 0.4 System.out.println( "off: " +
* randomAccessFile.getFilePointer() + " frame# " + frameNoRead
* + " sim t: " + simulationTime + " expected t after reset: " +
* expectedTime + " real t: " + realExpectedTime); if (
* Math.abs( (double)simulationTime - expectedTime ) > eps ) {
* System.out.println( "Missing: off: " +
* randomAccessFile.getFilePointer() + " frame# " + frameNoRead
* + " sim t: " + simulationTime + " expected t after reset: " +
* expectedTime + " real t: " + realExpectedTime); expectedTime
* = simulationTime; } expectedTime += 0.2d; realExpectedTime +=
* 0.2d;
*
*/
// now entering coordinates header
noOfAtomsRead2 = randomAccessFile.readInt();
if (noOfAtomsRead == noOfAtomsRead2)
{
if (noOfAtomsRead > 9)
{ // compressed format (case b)
// 4 is precision
// 24 = (4 * 2 * 3 ) is min and max of x, y and z
// 4 is number of bits used for compression
randomAccessFile.skipBytes(4 + 24 + 4);
sizeOfCoordinates = randomAccessFile.readInt();
int mod4 = sizeOfCoordinates % 4;
if (mod4 > 0)
{
sizeOfCoordinates += (4 - mod4);
}
randomAccessFile.skipBytes(sizeOfCoordinates); // skip coordinates
}
else
{ // uncompressed format (case a)
// x, y and z per atom a 4 bytes each
randomAccessFile.skipBytes(noOfAtomsRead * 3 * 4);
}
}
else
{
throw new RuntimeException("Problem with atom sizes");
} // end if-else
}
else
{
throw new RuntimeException("No magic bytes found. Error in trajectory.");
} // end if-else
}
while (randomAccessFile.getFilePointer() < randomAccessFile.length());
this.numOfFrames = framesDetected;
// copy starting position of frames from the list into an array and check their size
int numberOfFrames = tempFramePositions.size();
this.framePos = new long[numberOfFrames];
this.frameBroken = new BitSet(numberOfFrames);
long tmpPosition_old = 0; // tmp variable to calculate the size of a frame
for (int i = 0; i < numberOfFrames; i++)
{
long tmpPosition = tempFramePositions.getLong(i);
if (((tmpPosition - tmpPosition_old) % 4) != 0)
{ // check if the frame size is a multiple of 4 bytes
this.frameBroken.set(i); // mark frame as broken
System.err.println("WARNING: Frame " + i
+ " (numbering goes from 0 to n-1) has a wrong size. It's not a multiple of 4 bytes. This indicates that the frame may be broken. The frame is marked as broken.");
}
tmpPosition_old = tmpPosition;
this.framePos[i] = tmpPosition;
}
if (((this.fileSize - tmpPosition_old) % 4) != 0)
{ // check if the frame size is a multiple of 4 bytes
int frameNr = numberOfFrames - 1;
this.frameBroken.set(frameNr); // mark frame as broken
System.err.println("WARNING: Last frame " + frameNr
+ " (numbering goes from 0 to n-1) which was read from the trajectory has a wrong size. It's not a multiple of 4 bytes. This indicates that this frame may be broken. The frame is marked as broken.");
}
tempFramePositions = null; // free memory
// allocate the atom coordinates matrix with a fixed size, to help optimization
this.coordinatesUncompressed = doublesNew.matrix(this.nrAtoms,3);
// clean up and reset stuff
this.randomAccessFile.seek(0); // set input file pointer to the start position
this.nrOfCurrentFrameHeader = -1; // no proper frame header data has been read in at the moment
this.nrOfCurrentFrameCoordinates = -1; // no proper frame coordinates data has been read in at the
// moment
// mark the header as initialized.
this.initialized = true;
}
/**
* Returns the number of degrees of freedom (nAtoms*3).
*/
public int nDOF()
{
return this.nrAtoms * 3;
}
/**
* Returns the number of atoms.
*/
public int nAtoms()
{
return this.nrAtoms;
}
/**
* Number of frames in the trajectory.
*
* @return number of frames in the trajectory (from 1 to n), 0 :=
* trajectory contains no frames
*/
public int nFrames()
{ // return amount of frames from 1 to n, 0 := trajectory contains no frames
// two solutions are possible (the variable numOfFrames or this.framePos.length); the TrrReader uses the
// second one
if (this.framePos == null)
{
return 0;
} // trajectory contains no frames
return this.numOfFrames;
}
/**
* Query whether atom coordinates are stored in the frame.
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return query result (false := coordinates don't exist, true := coordinates
* exist)
*/
public boolean frameHasPosition(int frameIndex) throws IOException
{
if (!readFrame(frameIndex))
{
return false;
} // frame doesn't exist => atoms in the frame have no coordinates
else if (this.coordinatesHeaderAndCoordinatesSize > 0)
{
return true;
}
else
{
return false;
}
}
/**
* Get the size of a frame in bytes.
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return frame size in bytes
*/
public int getFrameSize(int frameIndex)
{
if ((this.numOfFrames == 0) || (frameIndex >= this.numOfFrames)) // trajectory contains no frames or query for a
// frameIndex which is larger than the total
// number of frames in the trajectory
{
return 0;
}
else if (frameIndex < (this.numOfFrames - 1)) // query for frame 0 to n-1
{
return (int) (this.framePos[frameIndex + 1] - this.framePos[frameIndex]);
}
else // query for last frame
{
return (int) (this.fileSize - this.framePos[frameIndex]);
}
}
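// Worked example (hypothetical values, just to illustrate the calculation): with
// framePos = {0, 10244, 20488} and fileSize = 30732, getFrameSize(0) and getFrameSize(1)
// both return 10244 (difference of neighbouring start offsets), while getFrameSize(2),
// the last frame, returns 30732 - 20488 = 10244 (distance to the end of the file).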
public int getFrameSize(int frameIndex, boolean show)
{
System.out.println("From: " + this.framePos[frameIndex]);
System.out.println("To: " + this.framePos[frameIndex + 1]);
return getFrameSize(frameIndex);
}
/**
* Check if the frame exists in the trajectory, then read the "frame header",
* "coordinates header" and coordinates (if compressed, don't decode them) of the
* current frame.
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return read result (true := frame exists, false := frame does not exist)
*/
protected boolean readFrame(int frameIndex) throws IOException
{
// Check if the frame exists in the trajectory
if (frameIndex >= this.numOfFrames)
{ // frame does not exist
return false;
}
if(this.frameBroken.get(frameIndex))
{ // frame is marked as broken
System.err.println("WARNING: You are trying to read from a broken frame " + frameIndex
+ " (numbering goes from 0 to n-1). This is not allowed! Please fix first the error in your trajectory.");
return false;
}
else
{ // frame exists and is not marked as broken => read header information from the current frame
if (this.nrOfCurrentFrameHeader != frameIndex)
{ // only read data of a new (not the current) frame
this.nrOfCurrentFrameHeader = frameIndex; // store the nr. of the current frame (from 0 to
// numOfFrames-1), to avoid reading and decoding the same frame
// data twice
this.frameSize = getFrameSize(frameIndex); // frame size in bytes
this.coordinatesHeaderAndCoordinatesSize = this.frameSize - this.frameHeaderSize; // size
// in bytes
// of coordinates
// + coordinates header
this.randomAccessFile.seek(this.framePos[frameIndex]); // set file pointer to the corresponding position
// in the input file
// no checking for end of file etc. has to be done, this was already done in the
// constructor
// create a buffer array which has the frame size in bytes
// and ensure the current frameSize fits into the capacity of the byte buffer.
if(bb == null || bb.capacity() < this.frameSize) {
// in case of underrun, allocate 1.5 times more memory than for last frame
// to avoid further reallocations.
int newSize = (int)Math.floor(this.frameSize*1.5);
bb = ByteBuffer.wrap(new byte[newSize]);
}
// reset input positions and read current frameSize bytes.
bb.clear();
this.randomAccessFile.readFully(bb.array(), this.frameSize);
try
{
this.magicNrFileVersion = bb.getInt(); // read "magic number" (=Gromacs version)
this.nrAtoms = bb.getInt(); // read the number of atoms in the current frame from the frame header
this.step = bb.getInt(); // simulation frame number in the trajectory (not a real number!),
// e.g. 0,100,200,....
this.t = bb.getFloat(); // floating point representation of the simulation time in the
// trajectory
this.box = new float[]
{
bb.getFloat(),
bb.getFloat(),
bb.getFloat(),
bb.getFloat(),
bb.getFloat(),
bb.getFloat(),
bb.getFloat(),
bb.getFloat(),
bb.getFloat()
};
this.nrAtomsCoordinatesHeader = bb.getInt(); // in the compressed atom coordinates
// block the number of atoms is also added =>
// redundant => read it and throw it away!
if (this.nrAtoms > 9)
{ // coordinates of atoms are compressed
this.coordinatesHeaderSize = 40; // size in bytes of the "coordinates header"
this.precision = bb.getFloat(); // read precision of compressed atom coordinates
this.minInt = new int[]
{
bb.getInt(), bb.getInt(), bb.getInt()
};
this.maxInt = new int[]
{
bb.getInt(), bb.getInt(), bb.getInt()
};
this.amountBitsForCompressedCoordinates = bb.getInt();
this.compressedCoordinatesLengthInByte = bb.getInt();
int compressedCoordinatesSizeInFourBytesMeasure = (int) (((this.coordinatesHeaderAndCoordinatesSize) / 4) - 10); // size in 4-byte words of the
// compressed coordinates (without
// the coordinates header,
// 40 bytes = 10*4 bytes)
this.coordinatesCompressed = new int[compressedCoordinatesSizeInFourBytesMeasure + 3];
if ((this.compressedCoordinatesLengthInByte
+ ((4 - (this.compressedCoordinatesLengthInByte % 4)) % 4))
!= (this.coordinatesHeaderAndCoordinatesSize - this.coordinatesHeaderSize))
{ // check if frame "coordinates" is a multiple of 4
this.frameBroken.set(frameIndex); // mark frame as broken
System.err.println("WARNING: Coordinates section of frame " + frameIndex
+ " (numbering goes from 0 to n-1) has a wrong size. It's not a multiple of 4 bytes. This indicates that this frames may be broken. Frame marked as broken.");
}
for (int ii = 3; ii < (compressedCoordinatesSizeInFourBytesMeasure + 3); ii++)
{
this.coordinatesCompressed[ii] = bb.getInt();
}
}
else
{ // coordinates of atoms are not compressed
this.coordinatesHeaderSize = 4; // size in bytes of the "coordinates header"
this.precision = 0; // coordinates are not compressed => set precision to the arbitrary value 0
this.minInt = new int[]
{
0, 0, 0
};
this.maxInt = new int[]
{
0, 0, 0
};
this.amountBitsForCompressedCoordinates = 0;
this.compressedCoordinatesLengthInByte = 0;
for (int iiAtom = 0; iiAtom < this.nrAtoms; iiAtom++)
{
this.coordinatesUncompressed.set(iiAtom, 0, bb.getFloat()); // read x coordinate of atom
this.coordinatesUncompressed.set(iiAtom, 1, bb.getFloat()); // read y coordinate of atom
this.coordinatesUncompressed.set(iiAtom, 2, bb.getFloat()); // read z coordinate of atom
}
} // end if-else
} catch (BufferUnderflowException e)
{
e.printStackTrace();
System.out.println("Frame current " + frameIndex + " from " + framePos[frameIndex] + " to "
+ framePos[frameIndex + 1]);
byte[] buf = bb.array();
for (int i = 0; i < buf.length; i++)
{
if ((i % 16) == 0)
{
System.out.println("");
}
byte b = buf[i];
System.out.print(byteToString(b) + " ");
}
System.out.println("");
System.out.println("Frame current - 1 " + (frameIndex - 1) + " from " + framePos[frameIndex - 1]
+ " to " + framePos[frameIndex - 1 + 1]);
System.out.println("Frame current + 1 " + (frameIndex + 1) + " from " + framePos[frameIndex + 1]
+ " to " + framePos[frameIndex + 1 + 1]);
throw new RuntimeException("Unrecoverable trajectory error.");
} // end try-catch
// System.out.println("nrAtomsCoordinatesHeader=" + nrAtomsCoordinatesHeader + " precision="+ precision
// +" minInt=<" + minInt[0] + "," + minInt[1] + "," + minInt[2]+">
// maxInt=<"+maxInt[0]+","+maxInt[1]+","+maxInt[2] + "> amountBitsForCompressedCoordinates="+
// amountBitsForCompressedCoordinates + " compressedCoordinatesLengthInByte=" +
// compressedCoordinatesLengthInByte +"\n");
} // end if
return true;
} // end if-else
}
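// Usage sketch (refers to methods of this class): getPositionsAt(i, out) first calls
// readFrame(i) to load the frame header and the (possibly still compressed) coordinates,
// and then uncompressFrameCoordinates(i, out) to decode them into the output array;
// repeated calls for the same frame index are cheap because of the caching via
// nrOfCurrentFrameHeader / nrOfCurrentFrameCoordinates.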
public static String byteToString(byte in)
{
byte ch = 0x00;
String out = "";
final String[] pseudo =
{
"0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F"
};
ch = (byte) (in & 0xF0); // Strip off high nibble
ch = (byte) (ch >>> 4); // shift the bits down
ch = (byte) (ch & 0x0F); // must do this if the high order bit is on!
out += pseudo[(int) ch]; // convert the nibble to a String Character
ch = (byte) (in & 0x0F); // Strip off low nibble
out += pseudo[(int) ch]; // convert the nibble to a String Character
return out;
}
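// Worked example: byteToString((byte) 0xAB) splits the byte into the high nibble 0xA and
// the low nibble 0xB and returns the hex string "AB"; byteToString((byte) 0x0F) returns "0F".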
/**
* Check if the frame exists and uncompress its coordinates (if it is a new frame,
* read in the coordinates first). The compressed coordinates of all atoms in the
* current frame are stored in the variable "this.coordinatesCompressed"; the
* uncompressed coordinates are stored in "this.coordinatesUncompressed".
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return read result (true := frame exists, false := frame does not exist)
*/
protected boolean uncompressFrameCoordinates(int frameIndex, IDoubleArray out) throws IOException
{
if (frameIndex >= this.numOfFrames)
{ // frame does not exist
return false;
}
if(this.frameBroken.get(frameIndex))
{ // frame is marked as broken
System.err.println("WARNING: You are trying to read from a broken frame " + frameIndex
+ " (numbering goes from 0 to n-1). This is not allowed! Please fix first the error in your trajectory.");
return false;
}
// frame exists => read header information from the current frame
if (this.nrOfCurrentFrameCoordinates != frameIndex)
{ // only read frame header and coordinates if it is a
// new frame, if it is already the current frame don't
// do anything
if (this.nrOfCurrentFrameHeader != frameIndex)
{
readFrame(frameIndex);
} // re-read frame header, only needed in cases of programming errors
this.nrOfCurrentFrameCoordinates = frameIndex; // store the nr. of the current frame (from 0 to
// numOfFrames-1), to avoid reading and decoding same
// frame data
if (this.nrAtoms > 9)
{ // coordinates of atoms are compressed => uncompress them , if coordinates are
// not compressed don't do anything
// uncompress coordinates
this.coordinatesUncompressed = xdr3dfcoord(this.coordinatesCompressed, this.nrAtoms, this.precision, out);
}
}
return true;
}
/**
* Return the positions of the atoms in the current frame. Checks if the
* current frame exists and returns its coordinates.
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return coordinates of the atoms in the current frame ((number of
* atoms)x3 array)
*/
public IDoubleArray getPositionsAt(int frameIndex, IDoubleArray out) throws IOException
{
// System.out.println("frame index "+frameIndex+" "+randomAccessFile.getFilePointer());
if (!readFrame(frameIndex))
{
return null;
} // check if frame exists, read "frame header", "coordinate header" and coordinates (if compressed do not
// decode them) of current frame
if (!uncompressFrameCoordinates(frameIndex, out))
{
return null;
} // uncompress coordinates of the current frame and check if the frame exists
return this.coordinatesUncompressed;
}
/**
* Return the precision of the compression algorithm for the current frame. The
* default value is 1000 (scale factor used to reduce coordinate precision from
* float to int).
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return precision used in the current frame (default 1000)
*/
public float getPrecisionAt(int frameIndex) throws IOException
{
if (!readFrame(frameIndex))
{
return 0;
} // read header of current frame and check if frame exists
return this.precision;
}
/**
* Return the forces of the atoms in the current frame. Xtc trajectories
* contain, by definition, no forces!
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return null
*/
public float[] getForcesAt(int frameIndex) throws IOException
{
return null;
}
/**
* Return the basis vectors of the simulation box for the current frame. The
* simulation box (triclinic PBC) can be defined by 3 basis vectors a, b and c.
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return simulation box vectors of the current frame (3x3 array)
*/
public float[] getBoxAt(int frameIndex) throws IOException
{ // read box vectors from frame header
if (!readFrame(frameIndex))
{
return null;
} // read header of current frame and check if frame exists
return this.box;
}
/**
* Return the number of atoms in the current frame. Reads the header of the
* current frame and checks if the frame exists.
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return number of atoms in the current frame
*/
public int getNumberOfAtomsAt(int frameIndex) throws IOException
{
if (!readFrame(frameIndex))
{
return 0;
} // read header of current frame and check if frame exists
return this.nrAtoms;
}
/**
* Return the simulation step number of the frame in the trajectory.
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return simulation step number in the trajectory
*/
public int getStepAt(int frameIndex) throws IOException
{
if (!readFrame(frameIndex))
{
return 0;
} // read header of current frame and check if frame exists
return this.step;
}
/**
* Return the simulation time of a frame in the trajectory.
*
* @param frameIndex number of the frame in the trajectory, frameIndex
* goes from 0 to n-1
*
* @return simulation time which corresponds to the current frame number
* in the trajectory
*/
public float getTimeAt(int frameIndex) throws IOException
{
if (!readFrame(frameIndex))
{
return 0;
} // read header of current frame and check if frame exists
return this.t;
}
/**
* Close input file trajectory.
*/
@Override
public void close()
throws IOException
{
this.randomAccessFile.close();
}
@Override
public void open()
throws IOException
{
this.randomAccessFile = new CachedRandomAccessFile(filename);
}
@Override
public String getFileName()
{
return (filename);
}
/**
* Simplified Java implementation of the function "static int sizeofint(
* const int maxSize)" from libxdrf.c.
*
* @param maxSize maximum size of rescaled integers (atom coordinate) to
* compress
*
* @return Method returns the number of bits needed to store an integer with
* given max size
*/
private static int sizeofint(int maxSize)
{
int num = 1;
int num_of_bits = 0;
while ((maxSize >= num) && (num_of_bits < 32))
{
num_of_bits++;
num <<= 1;
}
return num_of_bits;
}
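// Worked example: sizeofint(1000) returns 10, because 2^10 = 1024 is the first power of two
// larger than 1000, so 10 bits are enough to store any value in the range 0..1000.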
private static int sizeofints(int[] sizes)
{
int i, num;
int num_of_bytes;
int num_of_bits;
int[] bytes = new int[32];
// in C: int bytes[32]
int bytecnt;
int tmp;
num_of_bytes = 1;
bytes[0] = 1;
num_of_bits = 0;
for (i = 0; i < 3; i++)
{ // loop over all 3 dimensions; c variable "num_of_ints" replaced by 3;
tmp = 0;
for (bytecnt = 0; bytecnt < num_of_bytes; bytecnt++)
{
tmp = (bytes[bytecnt] * sizes[i]) + tmp;
bytes[bytecnt] = tmp & 0xff;
tmp >>= 8;
}
while (tmp != 0)
{
bytes[bytecnt++] = tmp & 0xff;
tmp >>= 8;
}
num_of_bytes = bytecnt;
}
num = 1;
num_of_bytes--;
while (bytes[num_of_bytes] >= num)
{
num_of_bits++;
num *= 2;
}
// int tmpsum = num_of_bits + (num_of_bytes * 8);
// System.out.println("num_of_ints=3" +" sizes[0]=" +sizes[0]+" sizes[1]=" +sizes[1]+" sizes[2]=" +sizes[2]+ "
// num_of_bits=" +num_of_bits+" num_of_bytes=" +num_of_bytes+" tmpsum="+tmpsum);
return num_of_bits + (num_of_bytes * 8);
}
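// Worked example: sizeofints(new int[]{5, 5, 5}) returns 7, because the three sizes describe
// 5*5*5 = 125 possible combinations, which fit into 7 bits (2^7 = 128 >= 125).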
/**
* Simplified Java implementation of the function "static int receivebits(int
* buf[], int num_of_bits)" from libxdrf.c.
*
* <pre>
* The method decodes a number (integer) from the buffer "buf" using the specified number of bits "num_of_bits".
* Each atom coordinate would in general need 4 bytes = 32 bits uncompressed. The compressed coordinates are
* encoded with a smaller number of bits. The bits of the compressed coordinates are concatenated into a large
* bitstream and then split into 8 bit (= 1 byte) blocks to store them in a byte array.
* The buffer "buf" contains from position 3 to the end the compressed coordinates of all atoms in the current frame.
* This function only decodes the coordinates of a single atom. The information which atom coordinate to decode
* is stored in the first three elements of the buffer "buf":
* - buf[0]=cnt : counts the byte position (1 to end) of the "reading head" in the buffer
* - buf[1]=lastbits: counts the bit position (7 to 0) in the current byte of the "reading head" in the buffer
* - buf[2]=lastbyte: contains the last 8 bits which were read from the buffer "buf"
* In the next step reading starts from the position right of the "reading head"!
* An integer (num) is constructed from the extracted bits. This value is returned.
*
* Modifications from C to Java code:
* - C uses pointer arithmetic to split the content of buf (int array => 32 bit) into cbuf (unsigned char array => 8 bit) => introduce a for loop and bit operations to do this in Java
* - cbuf is implemented in C as an unsigned char array (1 byte), a Java char can store unicode (2 bytes), a Java byte has 1 byte => change the variable datatype to byte
* - Converting from byte (8 bit) to int (32 bit), Java sign-extends (fills the upper 24 bits with the sign bit) whereas C's unsigned char zero-extends => mask with the bitmask "0...01...1" (24*0 and 8*1) to clear the upper bits (correctResult = wrongResult & bitmask)
* </pre>
*
* @param buf bitstream (array) of compressed atom coordinates (the position
* in the bitstream to decode is written in the first three elements of the
* buffer)
* @param num_of_bits number of bits used to decode atom coordinates
*
* @return the current (encoded in the first three elements of buf)
* partially uncompressed coordinate
*/
private int receivebits(int[] buf, int num_of_bits)
{
try
{
int cnt, num, cnt_tmp, buf_readPosition, cnt_tmpStart;
int lastbits, lastbyte; // in C unsigned int => temporary variables only used for storing bits
// (bit level, not number level!) => should cause no error
final int mask = (1 << num_of_bits) - 1; // create bitmask with num_of_bits low-order ones
final int bitMaskToConvertByteProperlyToIntInJava = 0xff; // = 0...01...1 (24*0 and 8*1); converting from
// byte (8 bit) to int (32 bit), Java sign-extends the
// value whereas C's unsigned char zero-extends =>
// mask to clear the upper bits
cnt = buf[0];
lastbits = buf[1]; // lastbits = (unsigned int) buf[1]; in C unsigned int
lastbyte = buf[2]; // lastbyte = (unsigned int) buf[2];
buf_readPosition = (int) (cnt / 4) + 3; // roundDown(cnt/4)+3 ; 4 bytes per int => /4; first 3 elements of buf are
// reserved;
cnt_tmp = cnt % 4; // remainder
cnt_tmpStart = cnt_tmp; // create a copy of remainder for updating cnt=buf[0]
// int ii_timesFour=0, ii_plusThree=0 , bufLegth_minusThree=buf.length-3; // tmp variables to optimise
// loop execution
byte[] cbuf = new byte[4]; // in C an unsigned char array (1 byte), a Java char can store unicode
// (2 bytes), a Java byte has 1 byte => change the variable datatype to byte
num = 0;
while (num_of_bits >= 8)
{
// inelegant Java implementation of the C commands:
// unsigned char * cbuf;
// cbuf = ((unsigned char *)buf) + 3 * sizeof(*buf);
// => cbuf is a tmp variable (1 byte array) which holds a copy of buf (4 byte array) from position
// buf[3] to end
cbuf[0] = (byte) ((buf[buf_readPosition] >> 24) & 0xFF);
cbuf[1] = (byte) ((buf[buf_readPosition] >> 16) & 0xFF);
cbuf[2] = (byte) ((buf[buf_readPosition] >> 8) & 0xFF);
cbuf[3] = (byte) (buf[buf_readPosition] & 0xFF);
lastbyte = (lastbyte << 8) | (cbuf[cnt_tmp++] & bitMaskToConvertByteProperlyToIntInJava);
num |= (lastbyte >> lastbits) << (num_of_bits - 8);
num_of_bits -= 8;
}
if (num_of_bits > 0)
{
if (lastbits < num_of_bits)
{
cbuf[0] = (byte) ((buf[buf_readPosition] >> 24) & 0xFF);
cbuf[1] = (byte) ((buf[buf_readPosition] >> 16) & 0xFF);
cbuf[2] = (byte) ((buf[buf_readPosition] >> 8) & 0xFF);
cbuf[3] = (byte) (buf[buf_readPosition] & 0xFF);
lastbits += 8;
lastbyte = (lastbyte << 8) | (cbuf[cnt_tmp++] & bitMaskToConvertByteProperlyToIntInJava);
}
lastbits -= num_of_bits;
num |= (lastbyte >> lastbits) & ((1 << num_of_bits) - 1);
}
num &= mask;
buf[0] = cnt + cnt_tmp - cnt_tmpStart;
buf[1] = lastbits;
buf[2] = lastbyte;
// System.out.print("**arb**"+Integer.toHexString(buf[0])+" " + Integer.toHexString(buf[1]) +" "
// +Integer.toHexString(buf[2]) + " " + Integer.toHexString(buf[3])+ " " + Integer.toHexString(buf[4])+ " "
// + Integer.toHexString(buf[5])+ " " +num_of_bits +" " + Integer.toHexString(lastbyte));
return num;
} catch (ArrayIndexOutOfBoundsException e)
{ // last attempt to catch an error if the frame "coordinates" block is
// broken
System.err.println("ERROR: Incomplete frame " + this.nrOfCurrentFrameCoordinates
+ " (numbering goes from 0 to n-1). It's probably an error in the 'coordinates' section of this frame. For safty reasons the JVM is terminated!");
System.err.println(e.getMessage());
System.exit(-1);
return 0;
} // end try-catch
}
/**
* Simplified Java implementation of the function "static void
* receiveints(int buf[], const int num_of_ints, int num_of_bits, unsigned
* int sizes[], int nums[])" from libxdrf.c.
*
* <pre>
* Modifications from C to Java code:
* - The calculation result "int nums[]" was passed in the C code as a function parameter (call by reference) => in Java call by reference is not possible => return the result to the caller as the return value
* - For Gromacs the variable num_of_ints=3 => in the Java implementation the variable is replaced by the number 3 to help the compiler during optimisation
* </pre>
*
* @param buf bitstream (array) of compressed atom coordinates (the position
* in the bitstream to decode is written in the first three elements of the
* buffer)
* @param num_of_bits number of bits used to decode atom coordinates
* @param sizes rescaled and shifted sizes of the atom coordinates
*
* @return the current (encoded in the first three elements of buf)
* partially uncompressed coordinates
*/
private int[] receiveints(int[] buf, int num_of_bits, int[] sizes)
{
final int num_of_ints = 3;
Arrays.fill(this.bytes, 0);
int[] nums =
{
0, 0, 0
}; // in c function parameter
int i, j, num_of_bytes = 0, p, num, sizes_valueAtPostionI;
// {int tmpII=0; printf("*riB* buf=");for(tmpII=1; tmpII<=4; tmpII++){printf("%x " ,buf[tmpII]);} printf(",
// num_of_ints=%d num_of_bits=%d sizes[]={%d %d %d} nums[]={%d %d %d}\n", num_of_ints, num_of_bits,
// sizes[0],sizes[1],sizes[2],nums[0],nums[1],nums[2]);}
while (num_of_bits > 8)
{
bytes[num_of_bytes++] = receivebits(buf, 8);
num_of_bits -= 8;
}
if (num_of_bits > 0)
{
bytes[num_of_bytes++] = receivebits(buf, num_of_bits);
}
for (i = num_of_ints - 1; i > 0; i--)
{
num = 0;
sizes_valueAtPostionI = sizes[i];
for (j = num_of_bytes - 1; j >= 0; j--)
{
num = (num << 8) | bytes[j];
p = num / sizes_valueAtPostionI; // round (p is int not double!)
bytes[j] = p;
num = num - (p * sizes_valueAtPostionI);
}
nums[i] = num;
}
nums[0] = bytes[0] | (bytes[1] << 8) | (bytes[2] << 16) | (bytes[3] << 24);
// System.out.print("**ari**"+Integer.toHexString(buf[0])+" " + Integer.toHexString(buf[1]) +" "
// +Integer.toHexString(buf[2]) + " " + Integer.toHexString(buf[3])+ " " + Integer.toHexString(buf[4])+ " " +
// Integer.toHexString(buf[5])+ " " +num_of_bits +" Sizes:" + Integer.toHexString(sizes[0])+" "+
// Integer.toHexString(sizes[1])+" "+ Integer.toHexString(sizes[2])+" nums:"+nums[0]+" "+ nums[1]+" "+
// nums[2]+"\n");
// {int tmpII=0; printf("*riA* buf=");for(tmpII=0; tmpII<=4; tmpII++){printf("%x " ,buf[tmpII]);} printf(",
// num_of_ints=%d num_of_bits=%d sizes[]={%d %d %d} nums[]={%d %d %d}\n", num_of_ints, num_of_bits,
// sizes[0],sizes[1],sizes[2],nums[0],nums[1],nums[2]);}
return nums;
}
/**
* Simplified Java implementation of the function "int xdr3dfcoord(XDR *xdrs,
* float *fp, int *size, float * precision)" from libxdrf.c.
*
* <pre>
* It was introduced to store specifically the 3D coordinates of molecules and it writes them in a compressed way.
* It starts by multiplying all numbers by the precision and rounding the result to an integer, effectively converting
* all floating point numbers to fixed point. It uses an algorithm for compression that is optimized for
* molecular data.
*
* Modifications from C to Java code:
* - The function input "XDR *xdrs" (xdr stream with compressed coordinates) was passed in the C code as a pointer. In Java a reference to an int array is used instead.
* - The decompressed coordinates were stored in the C code in the float pointer "float *fp" (pointer to an array of floats, each array has 3 elements). In Java this is shifted to the function return value.
* </pre>
*
* @param coordinatesCompressed bitstream buffer with compressed atom
* coordinates (the position in the bitstream to decode is written in the
* first three elements of the buffer)
* @param nrAtoms number of atoms in the current frame
* @param precision precision used for compressing the atom coordinates
*
* @return the uncompressed coordinates of the atoms
* in the current frame
*/
private IDoubleArray xdr3dfcoord(int[] coordinatesCompressed, int nrAtoms, float precision, IDoubleArray out)
{
/*
* private float[][] xdr3dfcoord(boolean readWriteMode) { XDR *xdrs :=
* filepointer to xdr stream => float *fp := pointer to array of floats,
* each array has 3 elements int *size := amount of atoms (=amount of
* coordinates/3) => nrAtomsFrameHeader float *precision := precision of
* compresed coordinates => precision readWriteMode: true=:read,
* false=:write
*/
int[] sizeInt = new int[3];
int[] sizeSmall = new int[3];
int[] thiscoord = new int[3];
int[] prevcoord = new int[3];
int[] bitsizeInt = new int[3];
int flag, k, run, i, iOutput,/* prevrun,*/ is_smaller, bitSize, tmp;
final int firstidx = 9; // start position in the array xtc_magicints of the first number != 0
// final int lastidx = xtc_magicints.length; // max. position of elements in array
int smallidx = amountBitsForCompressedCoordinates;
// int maxidx = Math.min(lastidx, smallidx + 8); // select the minimum between the number of bits used for
// compressing coordinates and the maximum element
// in the array xtc_magicints
// int minidx = maxidx - 8;
/*
* often this equals smallidx
*/
int smaller = xtc_magicints[Math.max(firstidx, smallidx - 1)] / 2;
int small = xtc_magicints[smallidx] / 2;
// int larger = xtc_magicints[maxidx];
sizeSmall[0] = sizeSmall[1] = sizeSmall[2] = xtc_magicints[smallidx];
float inv_precision = (float) (1.0 / precision); // calculate the inverse precision for decoding atom coordinates
int[] buf = coordinatesCompressed; // the int array buf points to the buffer with the compressed
// coordinates
buf[0] = buf[1] = buf[2] = 0; // buf[0-2] are special and do not contain actual data
// Calculate the coding base from the maximal digit in the base by adding 1. E.g. base 10:
// maximal digit 9 => 9+1=10 = coding base!
sizeInt[0] = maxInt[0] - minInt[0] + 1;
sizeInt[1] = maxInt[1] - minInt[1] + 1;
sizeInt[2] = maxInt[2] - minInt[2] + 1;
// calculate the number of bits needed to encode numbers in the range sizeInt
if ((sizeInt[0] | sizeInt[1] | sizeInt[2]) > 0xffffff)
{
bitsizeInt[0] = sizeofint(sizeInt[0]);
bitsizeInt[1] = sizeofint(sizeInt[1]);
bitsizeInt[2] = sizeofint(sizeInt[2]);
bitSize = 0;
/*
* flag the use of large sizes
*/
}
else
{
bitSize = sizeofints(sizeInt);
}
run = 0;
i = 0;
iOutput = 0;
while (i < nrAtoms)
{ // for loop implemented as a while loop (easier to optimise) => for every atom in the
// frame decode its coordinates
if (bitSize == 0)
{
thiscoord[0] = receivebits(buf, bitsizeInt[0]);
thiscoord[1] = receivebits(buf, bitsizeInt[1]);
thiscoord[2] = receivebits(buf, bitsizeInt[2]);
}
else
{
thiscoord = receiveints(buf, bitSize, sizeInt);
}
i++; // increase the loop counter
// add the initial offset to the "compressed coordinates" to get the original atom coordinates
thiscoord[0] += minInt[0];
thiscoord[1] += minInt[1];
thiscoord[2] += minInt[2];
prevcoord[0] = thiscoord[0];
prevcoord[1] = thiscoord[1];
prevcoord[2] = thiscoord[2];
flag = receivebits(buf, 1);
is_smaller = 0;
if (flag == 1)
{
run = receivebits(buf, 5);
is_smaller = run % 3;
run -= is_smaller;
is_smaller--;
}
if (run > 0)
{
for (k = 0; k < run; k += 3)
{
thiscoord = receiveints(buf, smallidx, sizeSmall);
i++;
thiscoord[0] += prevcoord[0] - small;
thiscoord[1] += prevcoord[1] - small;
thiscoord[2] += prevcoord[2] - small;
if (k == 0)
{
// interchange first with second atom for better compression of water molecules
tmp = thiscoord[0];
thiscoord[0] = prevcoord[0];
prevcoord[0] = tmp;
tmp = thiscoord[1];
thiscoord[1] = prevcoord[1];
prevcoord[1] = tmp;
tmp = thiscoord[2];
thiscoord[2] = prevcoord[2];
prevcoord[2] = tmp;
out.set(iOutput, 0, prevcoord[0] * inv_precision);
out.set(iOutput, 1, prevcoord[1] * inv_precision);
out.set(iOutput, 2, prevcoord[2] * inv_precision);
iOutput++;
}
else
{
prevcoord[0] = thiscoord[0];
prevcoord[1] = thiscoord[1];
prevcoord[2] = thiscoord[2];
}
// undo the conversion of atom coordinates from float to int
out.set(iOutput, 0, thiscoord[0] * inv_precision);
out.set(iOutput, 1, thiscoord[1] * inv_precision);
out.set(iOutput, 2, thiscoord[2] * inv_precision);
iOutput++;
} // end for
}
else
{
// undo the conversion of atom coordinates from float to int
out.set(iOutput, 0, thiscoord[0] * inv_precision);
out.set(iOutput, 1, thiscoord[1] * inv_precision);
out.set(iOutput, 2, thiscoord[2] * inv_precision);
iOutput++;
} // end if-else
smallidx += is_smaller;
if (is_smaller < 0)
{
small = smaller;
if (smallidx > firstidx)
{
smaller = xtc_magicints[smallidx - 1] / 2;
}
else
{
smaller = 0;
}
}
else if (is_smaller > 0)
{
smaller = small;
small = xtc_magicints[smallidx] / 2;
}
sizeSmall[0] = sizeSmall[1] = sizeSmall[2] = xtc_magicints[smallidx];
} // end while
return out; // return decompressed atom coordinates
}
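// Illustration of the fixed-point scheme used above (values are only examples): with
// precision = 1000 a coordinate of 1.234 is stored as the rounded integer 1234; decoding
// multiplies by inv_precision = 1/1000, giving 1.234 back (up to the chosen precision).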
};
|
package util;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
/*
* Date utility class.
* DB dates are read with: SELECT to_char(date, 'YYYYMMDDHH24MI') FROM table
* Constructors:
* 1) String of type YYYYMMDDHH24MI
* 2) int year, int month, int day
* 3) Calendar
*
* 2017.10.17 / coding by K
* 2017.10.23 Jason Choi : compareDate added.
*/
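/*
 * Usage sketch (hypothetical values):
 *   CocoaDate d = new CocoaDate("201710231430");                   // 2017-10-23 14:30
 *   String forDb = d.getDateQuery();                               // "201710231430"
 *   boolean sameDay = d.compareDate(new CocoaDate(2017, 10, 23));  // true
 */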
public class CocoaDate {
// Field
private int year;
private int month;
private int date;
private int startDay;
private int lastDate;
private int hour;
private int minute;
private Calendar originCal;
private Calendar renewCal;
public CocoaDate() { // CocoaDate
Calendar cal = Calendar.getInstance();
setDefault(cal.get(cal.YEAR), cal.get(cal.MONTH)+1, cal.get(cal.DATE), cal.get(cal.HOUR_OF_DAY), cal.get(cal.MINUTE));
setRenewCal(cal);
}
public CocoaDate(Calendar cal) {
setDefault(cal.get(cal.YEAR), cal.get(cal.MONTH)+1, cal.get(cal.DATE), cal.get(cal.HOUR_OF_DAY), cal.get(cal.MINUTE));
setRenewCal(cal);
}
public CocoaDate(int year, int month, int date) {
Calendar cal = Calendar.getInstance();
cal.set(year, month-1, date);
setDefault(cal.get(cal.YEAR), cal.get(cal.MONTH)+1, cal.get(cal.DATE), cal.get(cal.HOUR_OF_DAY), cal.get(cal.MINUTE));
setRenewCal(cal);
}
public CocoaDate(String sql_date) {
//YYYYMMDDHHMM
//012345678901
int year = Integer.parseInt(sql_date.substring(0, 4));
int month = Integer.parseInt(sql_date.substring(4, 6));
int date = Integer.parseInt(sql_date.substring(6, 8));
int hour = Integer.parseInt(sql_date.substring(8, 10));
int minute = Integer.parseInt(sql_date.substring(10));
Calendar cal = Calendar.getInstance();
cal.set(year, month-1, date, hour, minute); // Calendar months are 0-based, consistent with the other constructors
setDefault(year, month, date, hour, minute);
setRenewCal(cal);
}
// #00047 : Constructor for DATE type --> CocoaDate
public CocoaDate(Date date) {
this(new SimpleDateFormat("yyyyMMddHHmm").format(date));
}
private void setDefault(int year, int month, int date, int hour, int minute) {
this.year = year;
this.month = month;
this.date = date;
this.hour = hour;
this.minute = minute;
} // CocoaDate
public String getDateQuery() {
String query = "";
String tempMonth = ""+this.month;
String tempDate = ""+this.date;
String tempHour = ""+this.hour;
String tempMinute = ""+this.minute;
System.out.println("CocoaDate getDateQuery : "+year+"\t"+tempMonth+"\t"+tempDate+"\t"+tempHour+"\t"+tempMinute);
if(this.month<10) tempMonth = "0"+tempMonth;
if(this.date<10) tempDate = "0"+tempDate;
if(this.hour<10) tempHour = "0"+tempHour;
if(this.minute<10) tempMinute = "0"+tempMinute;
query = year+tempMonth+tempDate+tempHour+tempMinute;
return query;
}
public Calendar getOriginCal() {
return originCal;
}
public void setOriginCal(Calendar originCal) {
this.originCal = originCal;
}
public Calendar getRenewCal() {
return renewCal;
}
public void setRenewCal(Calendar originCal) {
this.renewCal = originCal;
renewCal.set(Calendar.DATE, 1);
this.startDay = renewCal.get(renewCal.DAY_OF_WEEK);
this.lastDate = renewCal.getActualMaximum(renewCal.DATE);
} // store the Calendar and derive the start day of week and last date of the month
public void setYear(int year) {
this.year = year;
}
public void setMonth(int month) {
this.month = month;
}
public void setDate(int day) {
this.date = day;
}
public int getHour() {
return hour;
}
public void setHour(int hour) {
this.hour = hour;
}
public int getMinute() {
return minute;
}
public void setMinute(int minute) {
this.minute = minute;
}
public void setStartDay(int startDay) {
this.startDay = startDay;
}
public void setEndDay(int endDay) {
this.lastDate = endDay;
}
public int getYear() {
return year;
}
public int getMonth() {
return month;
}
public int getDate() {
return date;
}
public String getDateDD() {
if(date<10) return "0"+date;
else return date+"";
}
public int getStartDay() {
return startDay;
}
public int getEndDay() {
return lastDate;
}
public int getLastDate() {
return lastDate;
}
public void setLastDate(int lastDate) {
this.lastDate = lastDate;
}
public boolean compareDate(CocoaDate pDate) {
// sychoi : compare THIS CocoaDate and pDate
return this.year == pDate.getYear()
&& this.month == pDate.getMonth()
&& this.date == pDate.getDate();
}
public boolean compareDate(CocoaDate srcDate, CocoaDate destDate) {
// sychoi : check if THIS CocoaDate instance INCLUSIVELY in between srcDate & destDate
return IntegerRange.betweenInclusive(this.getYear(), srcDate.getYear(), destDate.getYear())
&& IntegerRange.betweenInclusive(this.getMonth(), srcDate.getMonth(), destDate.getMonth())
&& IntegerRange.betweenInclusive(this.getDate(), srcDate.getDate(), destDate.getDate());
}
@Override
public String toString() {
return year + "/" + month + "/" + date + ", " + hour + ":"
+ minute;
}
}
|
package hex.tree.isoforextended;
import water.MRTask;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.util.CollectionUtils;
import water.util.RandomUtils;
import java.util.HashSet;
import java.util.Random;
public class SamplingUtils {
/**
*
* @param frame Frame to be sampled
* @param sampleSize approximate size of sample
* @param seed ...
* @return Random sub-sample of frame with size approximately {@code sampleSize}
*/
public static Frame sampleOfApproxSize(Frame frame, int sampleSize, long seed) {
return new SubSampleTask(sampleSize, frame.numRows(),seed)
.doAll(frame.types(), frame.vecs()).outputFrame();
}
/**
*
* @param frame Frame to be sampled
* @param sampleSize exact size of sample
* @param seed ...
* @return Random sub-sample of frame with size equal to {@code sampleSize}
*/
public static Frame sampleOfFixedSize(Frame frame, int sampleSize, long seed) {
HashSet<Long> rowsToChoose = CollectionUtils.setOfUniqueRandomNumbers(sampleSize, frame.numRows(), seed);
return new ChooseRowsTask(rowsToChoose).doAll(frame.types(), frame).outputFrame();
}
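// Usage sketch (trainFrame and the seed are placeholders): given some Frame "trainFrame",
//   Frame approx = SamplingUtils.sampleOfApproxSize(trainFrame, 256, 42L); // ~256 rows
//   Frame exact  = SamplingUtils.sampleOfFixedSize(trainFrame, 256, 42L);  // exactly 256 rows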
private static class ChooseRowsTask extends MRTask<ChooseRowsTask> {
private final HashSet<Long> rowsToChoose;
public ChooseRowsTask(HashSet<Long> rowsToChoose) {
this.rowsToChoose = rowsToChoose;
}
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
for (int row = 0; row < cs[0]._len; row++) {
if (rowsToChoose.contains(row + cs[0].start())) {
for (int column = 0; column < cs.length; column++) {
ncs[column].addNum(cs[column].atd(row));
}
}
}
}
}
/**
* Task to create random sub-sample of given Frame
*/
private static class SubSampleTask extends MRTask<SubSampleTask> {
private final long seed;
private final double sampleRate;
public SubSampleTask(int sampleSize, long frameNumRows, long seed) {
this.seed = seed;
this.sampleRate = ((double) sampleSize) / frameNumRows;
}
@Override
public void map(Chunk[] cs, NewChunk[] ncs) {
Random random = RandomUtils.getRNG(seed + cs[0].start());
for (int row = 0; row < cs[0]._len; row++) {
if (random.nextDouble() <= sampleRate) {
for (int column = 0; column < cs.length; column++) {
ncs[column].addNum(cs[column].atd(row));
}
}
}
}
}
}
|
package org.opencds.cqf.cql.elm.execution;
import org.opencds.cqf.cql.execution.Context;
import org.opencds.cqf.cql.runtime.Date;
import org.opencds.cqf.cql.runtime.DateTime;
import java.lang.reflect.InvocationTargetException;
// for Uncertainty
/*
CalculateAgeInYears(birthDate Date) Integer
CalculateAgeInYears(birthDate DateTime) Integer
CalculateAgeInMonths(birthDate Date) Integer
CalculateAgeInMonths(birthDate DateTime) Integer
CalculateAgeInWeeks(birthDate Date) Integer
CalculateAgeInWeeks(birthDate DateTime) Integer
CalculateAgeInDays(birthDate Date) Integer
CalculateAgeInDays(birthDate DateTime) Integer
CalculateAgeInHours(birthDate DateTime) Integer
CalculateAgeInMinutes(birthDate DateTime) Integer
CalculateAgeInSeconds(birthDate DateTime) Integer
The CalculateAge operators calculate the age of a person born on the given birth date/time
as of today/now in the precision named in the operator.
If the birthdate is null, the result is null.
The CalculateAge operators are defined in terms of a date/time duration calculation.
This means that if the given birthDate is not specified to the level of precision corresponding
to the operator being invoked, the result will be an uncertainty over the range of possible values,
potentially causing some comparisons to return null.
*/
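/*
Illustrative example (CQL, values are hypothetical): CalculateAgeInYears(@2010-06-15)
evaluated when today is @2020-06-14 yields 9, and yields 10 once today is @2020-06-15,
because the result is the whole-year duration between the birth date and today.
*/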
public class CalculateAgeEvaluator extends org.cqframework.cql.elm.execution.CalculateAge {
public static Object calculateAge(Object operand, String precision, Object today) throws NoSuchMethodException, IllegalAccessException, InvocationTargetException {
if (operand == null) {
return null;
}
return CalculateAgeAtEvaluator.calculateAgeAt(operand, today, precision);
}
@Override
public Object evaluate(Context context) {
Object operand = getOperand().evaluate(context);
String precision = getPrecision().value();
Object today =
operand instanceof Date
? DateFromEvaluator.dateFrom(context.getEvaluationDateTime())
: context.getEvaluationDateTime();
try
{
return calculateAge(operand, precision, today);
}
catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException e)
{
e.printStackTrace();
throw new RuntimeException(e.getMessage());
}
}
}
|
package com.thinkaurelius.titan.hadoop.compat;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskInputOutputContext;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
import com.thinkaurelius.titan.hadoop.HadoopGraph;
/**
* This interface encapsulates both API and bytecode-level
* IncompatibleClassChanges in Hadoop core. In theory, titan-hadoop-core should
* only touch parts of Hadoop core ABI that have remained stable since 1.0, and
* everything else should be hidden behind the compat layer.
* <p>
* This interface is public, but should be considered unstable and likely to
* change in the future as new Hadoop versions are released, or as
* titan-hadoop-core uses additional Hadoop features, or as bugs are discovered.
* It's possible to write and use a third-party implementation, but be prepared
* to update it when upgrading to a newer Titan release.
*/
public interface HadoopCompat {
/**
* Instantiate and return a HadoopCompiler instance that uses the supplied
* graph
*
* @param g data source/sink for the task compiler
* @return new compiler
*/
public HadoopCompiler newCompiler(HadoopGraph g);
/**
* Instantiate a new TaskAttemptContext using the given attempt ID and configuration.
*
* @param c configuration
* @param t task attempt ID
* @return new context object
*/
public TaskAttemptContext newTask(Configuration c, TaskAttemptID t);
/**
* Return the Hadoop configuration key which takes a boolean value and
* controls whether Hadoop will attempt speculative execution of mappers.
*
* @return string config key
*/
public String getSpeculativeMapConfigKey();
/**
* Return the Hadoop configuration key which takes a boolean value and
* controls whether Hadoop will attempt speculative execution of reducers.
*
* @return string config key
*/
public String getSpeculativeReduceConfigKey();
public String getMapredJarConfigKey();
/**
* Add {@code incr} to the counter designated by {@code counter} on {@code context}.
*
* @param context Hadoop task IO context containing counter state
* @param counter name of the counter
* @param incr amount to add to the counter's current value
*/
public void incrementContextCounter(TaskInputOutputContext context, Enum<?> counter, long incr);
/**
* Get configuration from the supplied task attempt context and return it.
*
* @param context Hadoop task attempt context
* @return configuration on supplied {@code context}
*/
public Configuration getContextConfiguration(TaskAttemptContext context);
/**
* Get configuration from the supplied job context and return it.
*
* @param context Hadoop job context
* @return configuration on supplied {@code context}
*/
public Configuration getJobContextConfiguration(JobContext context);
/**
* Get the value of the counter specified by {@code e} on {@code counters}.
*
* @param counters MRUnit test driver containing counter state
* @param e the name of the counter whose value should be retrieved
* @return current value
*/
public long getCounter(MapReduceDriver counters, Enum<?> e);
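// Usage sketch (compat and jobContext are placeholders for a concrete implementation and a
// running job): disable speculative execution for both mappers and reducers:
//   Configuration conf = compat.getJobContextConfiguration(jobContext);
//   conf.setBoolean(compat.getSpeculativeMapConfigKey(), false);
//   conf.setBoolean(compat.getSpeculativeReduceConfigKey(), false);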
}
|
package lucee.runtime.tag;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map.Entry;
import javax.servlet.jsp.tagext.Tag;
import lucee.commons.lang.HTMLEntities;
import lucee.commons.lang.StringUtil;
import lucee.runtime.exp.ApplicationException;
import lucee.runtime.exp.ExpressionException;
import lucee.runtime.exp.PageException;
import lucee.runtime.ext.tag.TagImpl;
import lucee.runtime.op.Caster;
import lucee.runtime.op.Decision;
import lucee.runtime.type.Array;
import lucee.runtime.type.Collection.Key;
import lucee.runtime.type.Struct;
import lucee.runtime.type.StructImpl;
import lucee.runtime.type.util.KeyConstants;
import lucee.runtime.type.util.ListUtil;
// FUTURE tag input
// attr validateAt: implement the tag attribute
// attr validate: add support for submitOnce
// Added support for generating Flash and XML controls (specified in the cfform tag).
// Added support for preventing multiple submissions.
// attr mask: implement; the logic behind it still needs to be realized
public class Input extends TagImpl {
public static final short TYPE_SELECT=-1;
public static final short TYPE_TEXT=0;
public static final short TYPE_RADIO=1;
public static final short TYPE_CHECKBOX=2;
public static final short TYPE_PASSWORD=3;
public static final short TYPE_BUTTON=4;
public static final short TYPE_FILE=5;
public static final short TYPE_HIDDEN=6;
public static final short TYPE_IMAGE=7;
public static final short TYPE_RESET=8;
public static final short TYPE_SUBMIT=9;
public static final short TYPE_DATEFIELD=10;
public static final short VALIDATE_DATE=4;
public static final short VALIDATE_EURODATE=5;
public static final short VALIDATE_TIME=6;
public static final short VALIDATE_FLOAT=7;
public static final short VALIDATE_INTEGER=8;
public static final short VALIDATE_TELEPHONE=9;
public static final short VALIDATE_ZIPCODE=10;
public static final short VALIDATE_CREDITCARD=11;
public static final short VALIDATE_SOCIAL_SECURITY_NUMBER=12;
public static final short VALIDATE_REGULAR_EXPRESSION=13;
public static final short VALIDATE_NONE=14;
public static final short VALIDATE_USDATE=15;
public static final short VALIDATE_RANGE=16;
public static final short VALIDATE_BOOLEAN=17;
public static final short VALIDATE_EMAIL=18;
public static final short VALIDATE_URL=19;
public static final short VALIDATE_UUID=20;
public static final short VALIDATE_GUID=21;
public static final short VALIDATE_MAXLENGTH=22;
public static final short VALIDATE_NOBLANKS=23;
// TODO SubmitOnce
/**
* @param validate The validate to set.
* @throws ApplicationException
*/
public void setValidate(String validate) throws ApplicationException {
validate=validate.toLowerCase().trim();
if(validate.equals("creditcard")) input.setValidate(VALIDATE_CREDITCARD);
else if(validate.equals("date")) input.setValidate(VALIDATE_DATE);
else if(validate.equals("usdate")) input.setValidate(VALIDATE_USDATE);
else if(validate.equals("eurodate")) input.setValidate(VALIDATE_EURODATE);
else if(validate.equals("float")) input.setValidate(VALIDATE_FLOAT);
else if(validate.equals("numeric")) input.setValidate(VALIDATE_FLOAT);
else if(validate.equals("integer")) input.setValidate(VALIDATE_INTEGER);
else if(validate.equals("int")) input.setValidate(VALIDATE_INTEGER);
else if(validate.equals("regular_expression")) input.setValidate(VALIDATE_REGULAR_EXPRESSION);
else if(validate.equals("regex")) input.setValidate(VALIDATE_REGULAR_EXPRESSION);
else if(validate.equals("social_security_number"))input.setValidate(VALIDATE_SOCIAL_SECURITY_NUMBER);
else if(validate.equals("ssn")) input.setValidate(VALIDATE_SOCIAL_SECURITY_NUMBER);
else if(validate.equals("telephone")) input.setValidate(VALIDATE_TELEPHONE);
else if(validate.equals("phone")) input.setValidate(VALIDATE_TELEPHONE);
else if(validate.equals("time")) input.setValidate(VALIDATE_TIME);
else if(validate.equals("zipcode")) input.setValidate(VALIDATE_ZIPCODE);
else if(validate.equals("zip")) input.setValidate(VALIDATE_ZIPCODE);
else if(validate.equals("range")) input.setValidate(VALIDATE_RANGE);
else if(validate.equals("boolean")) input.setValidate(VALIDATE_BOOLEAN);
else if(validate.equals("email")) input.setValidate(VALIDATE_EMAIL);
else if(validate.equals("url")) input.setValidate(VALIDATE_URL);
else if(validate.equals("uuid")) input.setValidate(VALIDATE_UUID);
else if(validate.equals("guid")) input.setValidate(VALIDATE_GUID);
else if(validate.equals("maxlength")) input.setValidate(VALIDATE_MAXLENGTH);
else if(validate.equals("noblanks")) input.setValidate(VALIDATE_NOBLANKS);
else throw new ApplicationException("attribute validate has an invalid value ["+validate+"]",
"valid values for attribute validate are [creditcard, date, eurodate, float, integer, regular, social_security_number, telephone, time, zipcode]");
}
public static final String[] DAYNAMES_DEFAULT = new String[]{"S", "M", "T", "W", "Th", "F", "S"};
public static final String[] MONTHNAMES_DEFAULT = new String[]{"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"};
Struct attributes=new StructImpl();
InputBean input=new InputBean();
String passthrough;
String[] daynames=DAYNAMES_DEFAULT;
String[] monthnames=MONTHNAMES_DEFAULT;
boolean enabled=true;
boolean visible=true;
String label;
String tooltip;
String validateAt;
double firstDayOfWeek=0;
String mask;
@Override
public void release() {
super.release();
input=new InputBean();
attributes.clear();
passthrough=null;
daynames=DAYNAMES_DEFAULT;
monthnames=MONTHNAMES_DEFAULT;
enabled=true;
visible=true;
label=null;
tooltip=null;
validateAt=null;
firstDayOfWeek=0;
mask=null;
}
/**
* @param cssclass The cssclass to set.
*/
public void setClass(String cssclass) {
attributes.setEL("class",cssclass);
}
/**
* @param cssstyle The cssstyle to set.
*/
public void setStyle(String cssstyle) {
attributes.setEL("style",cssstyle);
}
/**
* @param id The id to set.
*/
public void setId(String id) {
attributes.setEL("id",id);
}
public void setAccept(String accept) {
attributes.setEL("accept",accept);
}
public void setAccesskey(String accesskey) {
attributes.setEL("accesskey",accesskey);
}
public void setAlign(String align) {
attributes.setEL("align",align);
}
public void setAlt(String alt) {
attributes.setEL("alt",alt);
}
public void setAutocomplete(String autocomplete) {
attributes.setEL("autocomplete",autocomplete);
}
public void setAutofocus(String autofocus) {
attributes.setEL("autofocus",autofocus);
}
public void setBorder(String border) {
attributes.setEL("border",border);
}
public void setDatafld(String datafld) {
attributes.setEL("datafld",datafld);
}
public void setDatasrc(String datasrc) {
attributes.setEL("datasrc",datasrc);
}
public void setForm(String form) {
attributes.setEL("form",form);
}
public void setFormaction(String formAction) {
attributes.setEL("formaction",formAction);
}
public void setFormenctype(String formenctype) {
attributes.setEL("formenctype",formenctype);
}
public void setFormmethod(String formmethod) {
attributes.setEL("formmethod",formmethod);
}
public void setFormnovalidate(String formnovalidate) {
attributes.setEL("formnovalidate",formnovalidate);
}
public void setFormtarget(String formtarget) {
attributes.setEL("formtarget",formtarget);
}
public void setLang(String lang) {
attributes.setEL("lang",lang);
}
public void setList(String list) {
attributes.setEL("list",list);
}
public void setDir(String dir) {
//dir=dir.trim();
//String lcDir=dir.toLowerCase();
//if( "ltr".equals(lcDir) || "rtl".equals(lcDir))
attributes.setEL("dir",dir);
//else throw new ApplicationException("attribute dir for tag input has an invalid value ["+dir+"], valid values are [ltr, rtl]");
}
public void setDataformatas(String dataformatas) {
dataformatas=dataformatas.trim();
//String lcDataformatas=dataformatas.toLowerCase();
//if( "plaintext".equals(lcDataformatas) || "html".equals(lcDataformatas))
attributes.setEL("dataformatas",dataformatas);
//else throw new ApplicationException("attribute dataformatas for tag input has an invalid value ["+dataformatas+"], valid values are [plaintext, html");
}
public void setDisabled(String disabled) {
// everything except false is treated as true
//if(Caster.toBooleanValue(disabled,true))
attributes.setEL("disabled",disabled);
}
public void setEnabled(String enabled) {
// everything except false is treated as true
//setDisabled(Caster.toString(!Caster.toBooleanValue(enabled,true)));
attributes.setEL("enabled",enabled);
}
public void setIsmap(String ismap) {
// everything except false is treated as true
//if(Caster.toBooleanValue(ismap,true)) attributes.setEL("ismap","ismap");
attributes.setEL("ismap",ismap);
}
public void setReadonly(String readonly) {
// everything except false is treated as true
//if(Caster.toBooleanValue(readonly,true)) attributes.setEL("readonly","readonly");
attributes.setEL("readonly",readonly);
}
public void setUsemap(String usemap) {
attributes.setEL("usemap",usemap);
}
/**
* @param onBlur The onBlur to set.
*/
public void setOnblur(String onBlur) {
attributes.setEL("onblur",onBlur);
}
/**
* @param onChange The onChange to set.
*/
public void setOnchange(String onChange) {
attributes.setEL("onchange",onChange);
}
/**
* @param onClick The onClick to set.
*/
public void setOnclick(String onClick) {
attributes.setEL("onclick",onClick);
}
/**
* @param onDblclick The onDblclick to set.
*/
public void setOndblclick(String onDblclick) {
attributes.setEL("ondblclick",onDblclick);
}
/**
* @param onFocus The onFocus to set.
*/
public void setOnfocus(String onFocus) {
attributes.setEL("onfocus",onFocus);
}
/**
* @param onKeyDown The onKeyDown to set.
*/
public void setOnkeydown(String onKeyDown) {
attributes.setEL("onkeydown",onKeyDown);
}
/**
* @param onKeyPress The onKeyPress to set.
*/
public void setOnkeypress(String onKeyPress) {
attributes.setEL("onkeypress",onKeyPress);
}
/**
* @param onKeyUp The onKeyUp to set.
*/
public void setOnkeyup(String onKeyUp) {
attributes.setEL("onKeyUp",onKeyUp);
}
/**
* @param onMouseDown The onMouseDown to set.
*/
public void setOnmousedown(String onMouseDown) {
attributes.setEL("onMouseDown",onMouseDown);
}
/**
* @param onMouseMove The onMouseMove to set.
*/
public void setOnmousemove(String onMouseMove) {
attributes.setEL("onMouseMove",onMouseMove);
}
/**
* @param onMouseUp The onMouseUp to set.
*/
public void setOnmouseup(String onMouseUp) {
attributes.setEL("onMouseUp",onMouseUp);
}
/**
* @param onselect The onselect to set.
*/
public void setOnselect(String onselect) {
attributes.setEL("onselect",onselect);
}
/**
* @param onMouseOut The onMouseOut to set.
*/
public void setOnmouseout(String onMouseOut) {
attributes.setEL("onMouseOut",onMouseOut);
}
/**
* @param onMouseOver The onMouseOver to set.
*/
public void setOnmouseover(String onMouseOver) {
attributes.setEL("onMouseOver",onMouseOver);
}
/**
* @param tabIndex The tabIndex to set.
*/
public void setTabindex(String tabIndex) {
attributes.setEL("tabindex",tabIndex);
}
/**
* @param title The title to set.
*/
public void setTitle(String title) {
attributes.setEL("title",title);
}
/**
* @param value The value to set.
*/
public void setValue(String value) {
attributes.setEL("value",value);
}
/**
* @param size The size to set.
*/
public void setSize(String size) {
attributes.setEL("size",size);
}
/**
* @param maxLength The maxLength to set.
*/
public void setMaxlength(double maxLength) {
input.setMaxLength((int)maxLength);
attributes.setEL("maxLength",Caster.toString(maxLength));
}
/**
* @param checked The checked to set.
*/
public void setChecked(String checked) {
// everything except false is treated as true
if(Caster.toBooleanValue(checked,true)) attributes.setEL("checked","checked");
}
/**
* @param daynames The daynames to set.
* @throws ApplicationException
*/
public void setDaynames(String listDaynames) throws ApplicationException {
String[] arr = ListUtil.listToStringArray(listDaynames, ',');
if(arr.length!=7)
throw new ApplicationException("value of attribute [daynames] must contain a string list with 7 values, now there are "+arr.length+" values");
this.daynames=arr;
}
/**
* @param firstDayOfWeek The first day of the week to set (0-6).
* @throws ApplicationException
*/
public void setFirstdayofweek(double firstDayOfWeek) throws ApplicationException {
if(firstDayOfWeek<0 || firstDayOfWeek>6)
throw new ApplicationException("value of attribute [firstDayOfWeek] must conatin a numeric value between 0-6");
this.firstDayOfWeek=firstDayOfWeek;
}
/**
* @param listMonthNames The month names to set.
* @throws ApplicationException
*/
public void setMonthnames(String listMonthNames) throws ApplicationException {
String[] arr = ListUtil.listToStringArray(listMonthNames, ',');
if(arr.length!=12)
throw new ApplicationException("value of attribute [MonthNames] must contain a string list with 12 values, now there are "+arr.length+" values");
this.monthnames=arr;
}
/**
* @param label The label to set.
*/
public void setLabel(String label) {
this.label=label;
}
/**
* @param mask The mask to set.
*/
public void setMask(String mask) {
this.mask=mask;
}
public void setMax(String max) {
attributes.setEL("max",max);
}
public void setMin(String min) {
attributes.setEL("min",min);
}
public void setMultiple(String multiple) {
attributes.setEL("multiple",multiple);
}
public void setPlaceholder(String placeholder) {
attributes.setEL("placeholder",placeholder);
}
/**
* @param notab The notab to set.
*/
public void setNotab(String notab) {
attributes.setEL("notab",notab);
}
/**
* @param hspace The hspace to set.
*/
public void setHspace(String hspace) {
attributes.setEL("hspace",hspace);
}
/**
* @param type The type to set.
* @throws ApplicationException
*/
public void setType(String type) throws ApplicationException {
type=type.toLowerCase().trim();
if( "checkbox".equals(type)) input.setType(TYPE_CHECKBOX);
else if("password".equals(type)) input.setType(TYPE_PASSWORD);
else if("text".equals(type)) input.setType(TYPE_TEXT);
else if("radio".equals(type)) input.setType(TYPE_RADIO);
else if("button".equals(type)) input.setType(TYPE_BUTTON);
else if("file".equals(type)) input.setType(TYPE_FILE);
else if("hidden".equals(type)) input.setType(TYPE_HIDDEN);
else if("image".equals(type)) input.setType(TYPE_IMAGE);
else if("reset".equals(type)) input.setType(TYPE_RESET);
else if("submit".equals(type)) input.setType(TYPE_SUBMIT);
else if("datefield".equals(type)) input.setType(TYPE_DATEFIELD);
else throw new ApplicationException("attribute type has an invalid value ["+type+"]","valid values for attribute type are " +
"[checkbox, password, text, radio, button, file, hidden, image, reset, submit, datefield]");
attributes.setEL("type",type);
}
/**
* @param onError The onError to set.
*/
public void setOnerror(String onError) {
input.setOnError(onError);
}
/**
* @param onValidate The onValidate to set.
*/
public void setOnvalidate(String onValidate) {
input.setOnValidate(onValidate);
}
/**
* @param passthrough The passThrough to set.
* @throws PageException
*/
public void setPassthrough(Object passthrough) throws PageException {
if(passthrough instanceof Struct) {
Struct sct = (Struct) passthrough;
Iterator<Entry<Key, Object>> it = sct.entryIterator();
Entry<Key, Object> e;
while(it.hasNext()) {
e=it.next();
attributes.setEL(e.getKey(),e.getValue());
}
}
else this.passthrough = Caster.toString(passthrough);
//input.setPassThrough(passThrough);
}
/**
* @param pattern The pattern to set.
* @throws ExpressionException
*/
public void setPattern(String pattern) throws ExpressionException {
input.setPattern(pattern);
}
/**
* @param range The range to set.
* @throws PageException
*/
public void setRange(String range) throws PageException {
String errMessage="attribute range has an invalid value ["+range+"], must be string list with numbers";
String errDetail="Example: [number_from,number_to], [number_from], [number_from,], [,number_to]";
Array arr=ListUtil.listToArray(range,',');
if(arr.size()==1) {
double from=Caster.toDoubleValue(arr.get(1,null),true,Double.NaN);
if(!Decision.isValid(from))throw new ApplicationException(errMessage,errDetail);
input.setRangeMin(from);
input.setRangeMax(Double.NaN);
}
else if(arr.size()==2) {
String strFrom=arr.get(1,"").toString().trim();
double from=Caster.toDoubleValue(strFrom,Double.NaN);
if(!Decision.isValid(from) && strFrom.length()>0) {
throw new ApplicationException(errMessage,errDetail);
}
input.setRangeMin(from);
String strTo=arr.get(2,"").toString().trim();
double to=Caster.toDoubleValue(strTo,Double.NaN);
if(!Decision.isValid(to) && strTo.length()>0) {
throw new ApplicationException(errMessage,errDetail);
}
input.setRangeMax(to);
}
else throw new ApplicationException(errMessage,errDetail);
}
/**
* @param required The required to set.
*/
public void setRequired(boolean required) {
input.setRequired(required);
}
/**
* @param name The name to set.
*/
public void setName(String name) {
attributes.setEL(KeyConstants._name,name);
input.setName(name);
}
/**
* @param message The message to set.
*/
public void setMessage(String message) {
if(!StringUtil.isEmpty(message))input.setMessage(message);
}
@Override
public int doEndTag() throws PageException {
try {
_doEndTag();
}
catch (IOException e) {
throw Caster.toPageException(e);
}
return EVAL_PAGE;
}
private void _doEndTag() throws PageException, IOException {
// check attributes
if(input.getValidate()==VALIDATE_REGULAR_EXPRESSION && input.getPattern()==null) {
throw new ApplicationException("when validation type regular_expression is seleted, the pattern attribute is required");
}
Tag parent = getParent();
while(parent!=null && !(parent instanceof Form)){
parent=parent.getParent();
}
if(parent instanceof Form) {
Form form = (Form)parent;
form.setInput(input);
if(input.getType()==TYPE_DATEFIELD && form.getFormat()!=Form.FORMAT_FLASH)
throw new ApplicationException("type [datefield] is only allowed if form format is flash");
}
else {
throw new ApplicationException("Tag must be inside a form tag");
}
draw();
}
void draw() throws IOException, PageException {
// start output
pageContext.forceWrite("<input");
//lucee.runtime.type.Collection.Key[] keys = attributes.keys();
//lucee.runtime.type.Collection.Key key;
Iterator<Entry<Key, Object>> it = attributes.entryIterator();
Entry<Key, Object> e;
while(it.hasNext()) {
e = it.next();
pageContext.forceWrite(" ");
pageContext.forceWrite(e.getKey().getString());
pageContext.forceWrite("=\"");
pageContext.forceWrite(enc(Caster.toString(e.getValue())));
pageContext.forceWrite("\"");
}
if(passthrough!=null) {
pageContext.forceWrite(" ");
pageContext.forceWrite(passthrough);
}
pageContext.forceWrite(">");
}
/**
* html encode a string
* @param str string to encode
* @return encoded string
*/
String enc(String str) {
return HTMLEntities.escapeHTML(str,HTMLEntities.HTMLV20);
}
/**
* @return the monthnames
*/
public String[] getMonthnames() {
return monthnames;
}
/**
* @param monthnames the monthnames to set
*/
public void setMonthnames(String[] monthnames) {
this.monthnames = monthnames;
}
/**
* @param height the height to set
*/
public void setHeight(String height) {
attributes.setEL("height",height);
}
/**
* @param input the input to set
*/
public void setInput(InputBean input) {
this.input = input;
}
/**
* @param passthrough the passthrough to set
*/
public void setPassthrough(String passthrough) {
this.passthrough = passthrough;
}
/**
* @param tooltip the tooltip to set
* @throws ApplicationException
*/
public void setTooltip(String tooltip) {
this.tooltip = tooltip;
}
/**
* @param validateAt the validateAt to set
* @throws ApplicationException
*/
public void setValidateat(String validateAt) throws ApplicationException {
this.validateAt = validateAt;
throw new ApplicationException("attribute validateAt is not supportrd for tag input ");
}
/**
* @param visible the visible to set
* @throws ApplicationException
*/
public void setVisible(boolean visible) {
this.visible = visible;
}
/**
* @param width the width to set
* @throws ApplicationException
*/
public void setWidth(String width) {
attributes.setEL("width", width);
}
private ExpressionException notSupported(String label) {
return new ExpressionException("attribute ["+label+"] is not supported");
}
public void setAutosuggest(String autosuggest) throws ExpressionException {
throw notSupported("autosuggest");
//attributes.setEL("bind",bind);
}
public void setAutosuggestbinddelay(double autosuggestBindDelay) throws ExpressionException {
throw notSupported("autosuggestBindDelay");
//attributes.setEL("bind",bind);
}
public void setAutosuggestminlength(double autosuggestMinLength) throws ExpressionException {
throw notSupported("autosuggestMinLength");
//attributes.setEL("bind",bind);
}
public void setBind(String bind) throws ExpressionException {
throw notSupported("bind");
//attributes.setEL("bind",bind);
}
public void setBindattribute(String bindAttribute) throws ExpressionException {
throw notSupported("bindAttribute");
//attributes.setEL("bind",bind);
}
public void setBindonload(boolean bindOnLoad) throws ExpressionException {
throw notSupported("bindOnLoad");
//attributes.setEL("bind",bind);
}
public void setDelimiter(String delimiter) throws ExpressionException {
throw notSupported("delimiter");
//attributes.setEL("bind",bind);
}
public void setMaxresultsdisplayed(double maxResultsDisplayed) throws ExpressionException {
throw notSupported("maxResultsDisplayed");
//attributes.setEL("bind",bind);
}
public void setOnbinderror(String onBindError) throws ExpressionException {
throw notSupported("onBindError");
//attributes.setEL("bind",bind);
}
public void setShowautosuggestloadingicon(boolean showAutosuggestLoadingIcon) throws ExpressionException {
throw notSupported("showAutosuggestLoadingIcon");
//attributes.setEL("bind",bind);
}
public void setSourcefortooltip(String sourceForTooltip) throws ExpressionException {
throw notSupported("sourceForTooltip");
//attributes.setEL("bind",bind);
}
public void setSrc(String src) {
attributes.setEL("src",src);
}
public void setStep(String step) {
attributes.setEL("step",step);
}
public void setTypeahead(boolean typeahead) throws ExpressionException {
throw notSupported("typeahead");
//attributes.setEL("src",src);
}
}
|
package steamcondenser.steam;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.channels.DatagramChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.util.concurrent.TimeoutException;
import java.util.logging.Logger;
import steamcondenser.steam.packets.SteamPacket;
/**
* @author Sebastian Staudt
* @version $Id$
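*
* <p>A minimal usage sketch (the server address, port and request packet below are
* placeholders, not part of this class):
* <pre>
* SteamSocket socket = new SteamSocket(InetAddress.getByName("server.example.org"), 27015);
* socket.send(someRequestPacket);        // any SteamPacket instance
* SteamPacket reply = socket.getReply(); // waits up to one second for an answer
* </pre>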
*/
public class SteamSocket
{
private ByteBuffer buffer;
private DatagramChannel channel;
private Selector selector;
/**
* @param ipAddress The IP of the server to connect to
* @param portNumber The port number of the server
*/
public SteamSocket(InetAddress ipAddress, int portNumber)
throws IOException
{
this.buffer = ByteBuffer.allocate(1400);
this.buffer.order(ByteOrder.LITTLE_ENDIAN);
this.selector = Selector.open();
this.channel = DatagramChannel.open();
this.channel.connect(new InetSocketAddress(ipAddress, portNumber));
this.channel.configureBlocking(false);
this.channel.register(this.selector, SelectionKey.OP_READ);
}
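/**
* Reads a reply from the remote end and turns it into a {@link SteamPacket}.
* Split replies (indicated by a packet header of -2) are reassembled from their
* parts before the packet object is created.
*
* @return The packet sent by the server
* @throws TimeoutException if no data arrives within one second
*/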
public SteamPacket getReply()
throws IOException, Exception
{
if(this.selector.select(1000) == 0)
{
throw new TimeoutException();
}
int bytesRead;
byte[] packetData = new byte[0];
SteamPacket packet;
this.buffer = ByteBuffer.allocate(1400);
bytesRead = this.channel.read(this.buffer);
this.buffer.rewind();
if(this.buffer.getInt() == -2L)
{
byte[] splitData, tmpData;
int packetCount, packetNumber;
long requestId;
short splitSize;
do
{
requestId = this.buffer.getInt();
packetCount = this.buffer.get();
packetNumber = this.buffer.get() + 1;
splitSize = this.buffer.getShort();
// Omit additional header on the first packet
if(packetNumber == 1)
{
this.buffer.getInt();
}
splitData = new byte[this.buffer.remaining()];
this.buffer.get(splitData);
tmpData = packetData;
packetData = new byte[tmpData.length + splitData.length];
System.arraycopy(tmpData, 0, packetData, 0, tmpData.length);
System.arraycopy(splitData, 0, packetData, tmpData.length, splitData.length);
this.buffer.clear();
this.channel.read(this.buffer);
this.buffer.rewind();
}
while(packetNumber < packetCount && this.buffer.getInt() == -2L);
packet = SteamPacket.createPacket(packetData);
}
else
{
packetData = new byte[this.buffer.remaining()];
this.buffer.get(packetData);
packet = SteamPacket.createPacket(packetData);
}
this.buffer.flip();
Logger.getLogger("global").info("Received packet of type \"" + packet.getClass().getSimpleName() + "\"");
return packet;
}
/**
* @param dataPacket The {@link steamcondenser.steam.packets.SteamPacket SteamPacket} to send to the remote end
*/
public void send(SteamPacket dataPacket)
throws IOException
{
Logger.getLogger("global").info("Sending data packet of type \"" + dataPacket.getClass().getSimpleName() + "\"");
byte[] data = dataPacket.getBytes();
this.buffer = ByteBuffer.wrap(data);
this.channel.write(this.buffer);
this.buffer.flip();
}
public void finalize()
throws IOException
{
this.channel.close();
}
}
|
package com.careerServices.MainApp;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.PreparedStatement;
import com.mysql.jdbc.jdbc2.optional.MysqlXADataSource;
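/**
* Small manual test that inserts one hard-coded row into the student_form table of
* the CareerServices database via a prepared statement. The connection settings
* (localhost, user "root", empty password) are taken from the code below.
*/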
public class DBtest
{
public static void main(String[] args)
{
MysqlXADataSource dataSource = new MysqlXADataSource(); //configuring the Database/data source
dataSource.setUser("root");
dataSource.setPassword("");
dataSource.setServerName("localhost");
dataSource.setDatabaseName("CareerServices");
try
{
//creating test variables
int id = 922298;
String name = "David Veloso";
String email = "velosoda@kean.edu";
String phoneNumber = "9083136146";
int arrival = 500;
byte walkIn = 1;
byte classtype = 0;
byte appointment = 0;
byte workshop = 0;
byte fullTime = 1;
byte partTime = 0;
byte freshman = 0;
byte sophomore = 0;
byte junior = 0;
byte senior = 1;
byte alumni = 0;
byte potentialStudent = 0;
byte careerCounseling = 1;
byte jobSearching = 0;
byte resume = 0;
byte coverLetter = 0;
byte mockInterview = 0;
byte internshipInformation = 0;
byte transitionToKean = 0;
byte sophomoreSeminar = 0;
byte juniorSeminar = 0;
byte seniorSeminar = 0;
byte career = 0;
byte copsTest = 0;
byte myersbriggs = 0;
byte approved = 0;
//putting the query to insert in to a string
String query = "insert into student_form(ID,Name,Email,Phone_Number,Arrival,Walk_In,Class,Appointment,Workshop,Full_Time,Part_Time,Freshman,Sophomore,Junior,Senior,Alumni,Potential_Student,Career_Counseling,Job_Searching,Resume,Cover_Letter,Mock_Interview,Internship_Information,Transistion_to_Kean,Sophomore_Seminar,Junior_Seminar,Senior_Seminar,Career,Cops_Test,Myers_Briggs,Approved)"
+ " values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);";
//each question mark represents a variable we want to pass through
//create connection
Connection conn = dataSource.getConnection();
System.out.println("connected");
//create preparedStatement to run the query
PreparedStatement insert = conn.prepareStatement(query);
//putting the test variables into the query
insert.setInt(1, id);
insert.setString(2, name);
insert.setString(3,email);
insert.setString(4, phoneNumber);
insert.setInt(5, arrival);
insert.setByte(6, walkIn);
insert.setByte(7, classtype);
insert.setByte(8, appointment);
insert.setByte(9, workshop);
insert.setByte(10, fullTime);
insert.setByte(11, partTime);
insert.setByte(12, freshman);
insert.setByte(13, sophomore);
insert.setByte(14, junior);
insert.setByte(15, senior);
insert.setByte(16, alumni);
insert.setByte(17, potentialStudent);
insert.setByte(18, careerCounseling);
insert.setByte(19, jobSearching);
insert.setByte(20, resume);
insert.setByte(21, coverLetter);
insert.setByte(22, mockInterview);
insert.setByte(23, internshipInformation);
insert.setByte(24, transitionToKean);
insert.setByte(25, sophomoreSeminar);
insert.setByte(26, juniorSeminar);
insert.setByte(27, seniorSeminar);
insert.setByte(28, career);
insert.setByte(29, copsTest);
insert.setByte(30, myersbriggs);
insert.setByte(31, approved);
//execute the query
insert.execute();
System.out.println("Query Executed");
//close everything to prevent memory leaks
insert.close();
conn.close();
}
catch(Exception e)
{
e.printStackTrace();
}
}
}
|
package org.lwjgl.demo.vulkan;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.vulkan.EXTDebugReport.*;
import static org.lwjgl.vulkan.KHRSwapchain.*;
import static org.lwjgl.vulkan.KHRSurface.*;
import static org.lwjgl.vulkan.VK10.*;
import static org.lwjgl.vulkan.VKUtil.*;
import static org.lwjgl.glfw.GLFW.*;
import static org.lwjgl.glfw.GLFWVulkan.*;
import static autostack.Stack.*;
import java.nio.ByteBuffer;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.nio.LongBuffer;
import org.lwjgl.PointerBuffer;
import org.lwjgl.glfw.GLFWKeyCallback;
import org.lwjgl.glfw.GLFWWindowSizeCallback;
import org.lwjgl.system.Configuration;
import org.lwjgl.system.MemoryUtil.BufferAllocator;
import org.lwjgl.vulkan.VkApplicationInfo;
import org.lwjgl.vulkan.VkAttachmentDescription;
import org.lwjgl.vulkan.VkAttachmentReference;
import org.lwjgl.vulkan.VkClearValue;
import org.lwjgl.vulkan.VkCommandBuffer;
import org.lwjgl.vulkan.VkCommandBufferAllocateInfo;
import org.lwjgl.vulkan.VkCommandBufferBeginInfo;
import org.lwjgl.vulkan.VkCommandPoolCreateInfo;
import org.lwjgl.vulkan.VkDebugReportCallbackCreateInfoEXT;
import org.lwjgl.vulkan.VkDebugReportCallbackEXT;
import org.lwjgl.vulkan.VkDevice;
import org.lwjgl.vulkan.VkDeviceCreateInfo;
import org.lwjgl.vulkan.VkDeviceQueueCreateInfo;
import org.lwjgl.vulkan.VkFramebufferCreateInfo;
import org.lwjgl.vulkan.VkImageMemoryBarrier;
import org.lwjgl.vulkan.VkImageViewCreateInfo;
import org.lwjgl.vulkan.VkInstance;
import org.lwjgl.vulkan.VkInstanceCreateInfo;
import org.lwjgl.vulkan.VkPhysicalDevice;
import org.lwjgl.vulkan.VkPresentInfoKHR;
import org.lwjgl.vulkan.VkQueue;
import org.lwjgl.vulkan.VkQueueFamilyProperties;
import org.lwjgl.vulkan.VkRect2D;
import org.lwjgl.vulkan.VkRenderPassBeginInfo;
import org.lwjgl.vulkan.VkRenderPassCreateInfo;
import org.lwjgl.vulkan.VkSemaphoreCreateInfo;
import org.lwjgl.vulkan.VkSubmitInfo;
import org.lwjgl.vulkan.VkSubpassDescription;
import org.lwjgl.vulkan.VkSurfaceCapabilitiesKHR;
import org.lwjgl.vulkan.VkSurfaceFormatKHR;
import org.lwjgl.vulkan.VkSwapchainCreateInfoKHR;
import org.lwjgl.vulkan.VkViewport;
/**
* Renders a simple cornflower blue image on a GLFW window with Vulkan.
* <p>
* This is a port of the lwjgl3-demos Vulkan ClearScreenDemo, but uses autostack.
* <p>
* Start the JVM with: -javaagent:target/autostack.jar
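* <p>
* A possible launch command (the jar path and classpath setup are assumptions and
* depend on your build layout):
* <pre>
* java -javaagent:target/autostack.jar org.lwjgl.demo.vulkan.ClearScreenDemo
* </pre>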
*
* @author Kai Burjack
*/
public class ClearScreenDemo {
private static final boolean validation = Boolean.parseBoolean(System.getProperty("vulkan.validation", "false"));
private static ByteBuffer[] layers = {
memEncodeASCII("VK_LAYER_LUNARG_threading", BufferAllocator.MALLOC),
memEncodeASCII("VK_LAYER_LUNARG_mem_tracker", BufferAllocator.MALLOC),
memEncodeASCII("VK_LAYER_LUNARG_object_tracker", BufferAllocator.MALLOC),
memEncodeASCII("VK_LAYER_LUNARG_draw_state", BufferAllocator.MALLOC),
memEncodeASCII("VK_LAYER_LUNARG_param_checker", BufferAllocator.MALLOC),
memEncodeASCII("VK_LAYER_LUNARG_swapchain", BufferAllocator.MALLOC),
memEncodeASCII("VK_LAYER_LUNARG_device_limits", BufferAllocator.MALLOC),
memEncodeASCII("VK_LAYER_LUNARG_image", BufferAllocator.MALLOC),
memEncodeASCII("VK_LAYER_GOOGLE_unique_objects", BufferAllocator.MALLOC)
};
/**
* Remove if added to spec.
*/
private static final int VK_FLAGS_NONE = 0;
/**
* This is just -1L, but it is nicer as a symbolic constant.
*/
private static final long UINT64_MAX = 0xFFFFFFFFFFFFFFFFL;
/**
* Create a Vulkan {@link VkInstance} using LWJGL 3.
* <p>
* The {@link VkInstance} represents a handle to the Vulkan API and we need that instance for almost everything we do.
*
* @return the VkInstance handle
*/
private static VkInstance createInstance(PointerBuffer requiredExtensions) {
// Here we say what the name of our application is and which Vulkan version we are targeting (this is optional)
VkApplicationInfo appInfo = VkApplicationInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_APPLICATION_INFO)
.pApplicationName("GLFW Vulkan Demo")
.pEngineName("")
.apiVersion(VK_MAKE_VERSION(1, 0, 2));
// We also need to tell Vulkan which extensions we would like to use.
// Those include the platform-dependent required extensions we are being told by GLFW to use.
// This includes stuff like the Window System Interface extensions to actually render something on a window.
// We also add the debug extension so that validation layers and other things can send log messages to us.
ByteBuffer VK_EXT_DEBUG_REPORT_EXTENSION = memEncodeASCII(VK_EXT_DEBUG_REPORT_EXTENSION_NAME, BufferAllocator.MALLOC);
PointerBuffer ppEnabledExtensionNames = mallocStackPointer(requiredExtensions.remaining() + 1);
ppEnabledExtensionNames.put(requiredExtensions) // <- platform-dependent required extensions
.put(VK_EXT_DEBUG_REPORT_EXTENSION) // <- the debug extensions
.flip();
// Now come the validation layers. These layers sit between our application (the Vulkan client) and the
// Vulkan driver. Those layers will check whether we make any mistakes in using the Vulkan API and yell
// at us via the debug extension.
PointerBuffer ppEnabledLayerNames = mallocStackPointer(layers.length);
for (int i = 0; validation && i < layers.length; i++)
ppEnabledLayerNames.put(layers[i]);
ppEnabledLayerNames.flip();
// Vulkan uses many struct/record types when creating something. This ensures that all information is available
// at the callsite of the creation and allows for easier validation and also for immutability of the created object.
// The following struct defines everything that is needed to create a VkInstance
VkInstanceCreateInfo pCreateInfo = VkInstanceCreateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO) // <- identifies what kind of struct this is (this is useful for extending the struct type later)
.pNext(NULL) // <- must always be NULL until any next Vulkan version tells otherwise
.pApplicationInfo(appInfo) // <- the application info we created above
.ppEnabledExtensionNames(ppEnabledExtensionNames) // <- and the extension names themselves
.ppEnabledLayerNames(ppEnabledLayerNames); // <- and the layer names themselves
PointerBuffer pInstance = mallocStackPointer(1); // <- create a PointerBuffer which will hold the handle to the created VkInstance
int err = vkCreateInstance(pCreateInfo, null, pInstance); // <- actually create the VkInstance now!
long instance = pInstance.get(0); // <- get the VkInstance handle
// One word about freeing memory:
// Any host-allocated memory directly or indirectly referenced via a parameter to a Vulkan function
// can be freed right after the invocation of that function returns.
// Check whether we succeeded in creating the VkInstance
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create VkInstance: " + translateVulkanResult(err));
}
// Create an object-oriented wrapper around the simple VkInstance long handle
// This is needed by LWJGL to later "dispatch" (i.e. direct calls to) the right Vulkan functions.
VkInstance ret = new VkInstance(instance, pCreateInfo);
// Now we can free/deallocate everything
memFree(VK_EXT_DEBUG_REPORT_EXTENSION);
return ret;
}
/**
* This function sets up the debug callback which the validation layers will use to yell at us when we make mistakes.
*/
private static long setupDebugging(VkInstance instance, int flags, VkDebugReportCallbackEXT callback) {
// Again, a struct to create something, in this case the debug report callback
VkDebugReportCallbackCreateInfoEXT dbgCreateInfo = VkDebugReportCallbackCreateInfoEXT.callocStack()
.sType(VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT) // <- the struct type
.pNext(NULL) // <- must be NULL
.pfnCallback(callback) // <- the actual function pointer (in LWJGL a Closure)
.pUserData(NULL) // <- any user data provided to the debug report callback function
.flags(flags); // <- indicates which kind of messages we want to receive
LongBuffer pCallback = mallocStackLong(1); // <- allocate a LongBuffer (for a non-dispatchable handle)
// Actually create the debug report callback
int err = vkCreateDebugReportCallbackEXT(instance, dbgCreateInfo, null, pCallback);
long callbackHandle = pCallback.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create VkInstance: " + translateVulkanResult(err));
}
return callbackHandle;
}
/**
* This method will enumerate the physical devices (i.e. GPUs) the system has available for us, and will just return
* the first one.
*/
private static VkPhysicalDevice getFirstPhysicalDevice(VkInstance instance) {
IntBuffer pPhysicalDeviceCount = mallocStackInt(1);
int err = vkEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, null);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to get number of physical devices: " + translateVulkanResult(err));
}
PointerBuffer pPhysicalDevices = mallocStackPointer(pPhysicalDeviceCount.get(0));
err = vkEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
long physicalDevice = pPhysicalDevices.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to get physical devices: " + translateVulkanResult(err));
}
return new VkPhysicalDevice(physicalDevice, instance);
}
private static class DeviceAndGraphicsQueueFamily {
VkDevice device;
int queueFamilyIndex;
}
private static DeviceAndGraphicsQueueFamily createDeviceAndGetGraphicsQueueFamily(VkPhysicalDevice physicalDevice) {
IntBuffer pQueueFamilyPropertyCount = mallocStackInt(1);
vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, null);
int queueCount = pQueueFamilyPropertyCount.get(0);
VkQueueFamilyProperties.Buffer queueProps = VkQueueFamilyProperties.callocStack(queueCount);
vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, queueProps);
int graphicsQueueFamilyIndex;
for (graphicsQueueFamilyIndex = 0; graphicsQueueFamilyIndex < queueCount; graphicsQueueFamilyIndex++) {
if ((queueProps.get(graphicsQueueFamilyIndex).queueFlags() & VK_QUEUE_GRAPHICS_BIT) != 0)
break;
}
FloatBuffer pQueuePriorities = mallocStackFloat(1).put(0.0f);
pQueuePriorities.flip();
VkDeviceQueueCreateInfo.Buffer queueCreateInfo = VkDeviceQueueCreateInfo.callocStack(1)
.sType(VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO)
.queueFamilyIndex(graphicsQueueFamilyIndex)
.pQueuePriorities(pQueuePriorities);
PointerBuffer extensions = mallocStackPointer(1);
ByteBuffer VK_KHR_SWAPCHAIN_EXTENSION = memEncodeASCII(VK_KHR_SWAPCHAIN_EXTENSION_NAME, BufferAllocator.MALLOC);
extensions.put(VK_KHR_SWAPCHAIN_EXTENSION);
extensions.flip();
PointerBuffer ppEnabledLayerNames = mallocStackPointer(layers.length);
for (int i = 0; validation && i < layers.length; i++)
ppEnabledLayerNames.put(layers[i]);
ppEnabledLayerNames.flip();
VkDeviceCreateInfo deviceCreateInfo = VkDeviceCreateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO)
.pNext(NULL)
.pQueueCreateInfos(queueCreateInfo)
.ppEnabledExtensionNames(extensions)
.ppEnabledLayerNames(ppEnabledLayerNames);
PointerBuffer pDevice = mallocStackPointer(1);
int err = vkCreateDevice(physicalDevice, deviceCreateInfo, null, pDevice);
long device = pDevice.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create device: " + translateVulkanResult(err));
}
DeviceAndGraphicsQueueFamily ret = new DeviceAndGraphicsQueueFamily();
ret.device = new VkDevice(device, physicalDevice, deviceCreateInfo);
ret.queueFamilyIndex = graphicsQueueFamilyIndex;
memFree(VK_KHR_SWAPCHAIN_EXTENSION);
return ret;
}
private static class ColorFormatAndSpace {
int colorFormat;
int colorSpace;
}
private static ColorFormatAndSpace getColorFormatAndSpace(VkPhysicalDevice physicalDevice, long surface) {
IntBuffer pQueueFamilyPropertyCount = mallocStackInt(1);
vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, null);
int queueCount = pQueueFamilyPropertyCount.get(0);
VkQueueFamilyProperties.Buffer queueProps = VkQueueFamilyProperties.callocStack(queueCount);
vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, queueProps);
// Iterate over each queue to learn whether it supports presenting:
IntBuffer supportsPresent = mallocStackInt(queueCount);
for (int i = 0; i < queueCount; i++) {
supportsPresent.position(i);
int err = vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, i, surface, supportsPresent);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to physical device surface support: " + translateVulkanResult(err));
}
}
// Search for a graphics and a present queue in the array of queue families, try to find one that supports both
int graphicsQueueNodeIndex = Integer.MAX_VALUE;
int presentQueueNodeIndex = Integer.MAX_VALUE;
for (int i = 0; i < queueCount; i++) {
if ((queueProps.get(i).queueFlags() & VK_QUEUE_GRAPHICS_BIT) != 0) {
if (graphicsQueueNodeIndex == Integer.MAX_VALUE) {
graphicsQueueNodeIndex = i;
}
if (supportsPresent.get(i) == VK_TRUE) {
graphicsQueueNodeIndex = i;
presentQueueNodeIndex = i;
break;
}
}
}
if (presentQueueNodeIndex == Integer.MAX_VALUE) {
// If there's no queue that supports both present and graphics try to find a separate present queue
for (int i = 0; i < queueCount; ++i) {
if (supportsPresent.get(i) == VK_TRUE) {
presentQueueNodeIndex = i;
break;
}
}
}
// Generate error if could not find both a graphics and a present queue
if (graphicsQueueNodeIndex == Integer.MAX_VALUE) {
throw new AssertionError("No graphics queue found");
}
if (presentQueueNodeIndex == Integer.MAX_VALUE) {
throw new AssertionError("No presentation queue found");
}
if (graphicsQueueNodeIndex != presentQueueNodeIndex) {
throw new AssertionError("Presentation queue != graphics queue");
}
// Get list of supported formats
IntBuffer pFormatCount = mallocStackInt(1);
int err = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pFormatCount, null);
int formatCount = pFormatCount.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to query number of physical device surface formats: " + translateVulkanResult(err));
}
VkSurfaceFormatKHR.Buffer surfFormats = VkSurfaceFormatKHR.callocStack(formatCount);
err = vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pFormatCount, surfFormats);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to query physical device surface formats: " + translateVulkanResult(err));
}
// If the format list includes just one entry of VK_FORMAT_UNDEFINED, the surface has no preferred format. Otherwise, at least one supported format will
// be returned.
int colorFormat;
if (formatCount == 1 && surfFormats.get(0).format() == VK_FORMAT_UNDEFINED) {
colorFormat = VK_FORMAT_B8G8R8A8_UNORM;
} else {
colorFormat = surfFormats.get(0).format();
}
int colorSpace = surfFormats.get(0).colorSpace();
ColorFormatAndSpace ret = new ColorFormatAndSpace();
ret.colorFormat = colorFormat;
ret.colorSpace = colorSpace;
return ret;
}
private static long createCommandPool(VkDevice device, int queueNodeIndex) {
VkCommandPoolCreateInfo cmdPoolInfo = VkCommandPoolCreateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO)
.queueFamilyIndex(queueNodeIndex)
.flags(VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT);
LongBuffer pCmdPool = mallocStackLong(1);
int err = vkCreateCommandPool(device, cmdPoolInfo, null, pCmdPool);
long commandPool = pCmdPool.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create command pool: " + translateVulkanResult(err));
}
return commandPool;
}
private static VkQueue createDeviceQueue(VkDevice device, int queueFamilyIndex) {
PointerBuffer pQueue = mallocStackPointer(1);
vkGetDeviceQueue(device, queueFamilyIndex, 0, pQueue);
long queue = pQueue.get(0);
return new VkQueue(queue, device);
}
private static VkCommandBuffer createCommandBuffer(VkDevice device, long commandPool) {
VkCommandBufferAllocateInfo cmdBufAllocateInfo = VkCommandBufferAllocateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO)
.commandPool(commandPool)
.level(VK_COMMAND_BUFFER_LEVEL_PRIMARY)
.commandBufferCount(1);
PointerBuffer pCommandBuffer = mallocStackPointer(1);
int err = vkAllocateCommandBuffers(device, cmdBufAllocateInfo, pCommandBuffer);
long commandBuffer = pCommandBuffer.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to allocate command buffer: " + translateVulkanResult(err));
}
return new VkCommandBuffer(commandBuffer, device);
}
private static void imageBarrier(VkCommandBuffer cmdbuffer, long image, int aspectMask, int oldImageLayout, int newImageLayout) {
// Create an image barrier object
VkImageMemoryBarrier.Buffer imageMemoryBarrier = VkImageMemoryBarrier.callocStack(1)
.sType(VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER)
.pNext(NULL)
.oldLayout(oldImageLayout)
.newLayout(newImageLayout)
.srcQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
.dstQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
.image(image);
imageMemoryBarrier.subresourceRange()
.aspectMask(aspectMask)
.baseMipLevel(0)
.levelCount(1)
.layerCount(1);
// Source layouts (old)
// Undefined layout
// Only allowed as initial layout!
// Make sure any writes to the image have been finished
if (oldImageLayout == VK_IMAGE_LAYOUT_UNDEFINED) {
//imageMemoryBarrier.srcAccessMask(VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT);
imageMemoryBarrier.srcAccessMask(0);// <- the validation layer says this must be 0
}
// Old layout is color attachment
// Make sure any writes to the color buffer have been finished
if (oldImageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
imageMemoryBarrier.srcAccessMask(VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT);
}
// Old layout is transfer source
// Make sure any reads from the image have been finished
if (oldImageLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
imageMemoryBarrier.srcAccessMask(VK_ACCESS_TRANSFER_READ_BIT);
}
// Old layout is shader read (sampler, input attachment)
// Make sure any shader reads from the image have been finished
if (oldImageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
imageMemoryBarrier.srcAccessMask(VK_ACCESS_SHADER_READ_BIT);
}
// Target layouts (new)
// New layout is transfer destination (copy, blit)
// Make sure any copies to the image have been finished
if (newImageLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
imageMemoryBarrier.dstAccessMask(VK_ACCESS_TRANSFER_WRITE_BIT);
}
// New layout is transfer source (copy, blit)
// Make sure any reads from and writes to the image have been finished
if (newImageLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL) {
imageMemoryBarrier.srcAccessMask(imageMemoryBarrier.srcAccessMask() | VK_ACCESS_TRANSFER_READ_BIT);
imageMemoryBarrier.dstAccessMask(VK_ACCESS_TRANSFER_READ_BIT);
}
// New layout is color attachment
// Make sure any writes to the color buffer have been finished
if (newImageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
imageMemoryBarrier.dstAccessMask(VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT);
imageMemoryBarrier.srcAccessMask(VK_ACCESS_TRANSFER_READ_BIT);
}
// New layout is depth attachment
// Make sure any writes to depth/stencil buffer have been finished
if (newImageLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
imageMemoryBarrier.dstAccessMask(imageMemoryBarrier.dstAccessMask() | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT);
}
// New layout is shader read (sampler, input attachment)
// Make sure any writes to the image have been finished
if (newImageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
imageMemoryBarrier.srcAccessMask(VK_ACCESS_HOST_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT);
imageMemoryBarrier.dstAccessMask(VK_ACCESS_SHADER_READ_BIT);
}
// Put barrier on top
int srcStageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
int destStageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
// Put barrier inside setup command buffer
vkCmdPipelineBarrier(cmdbuffer, srcStageFlags, destStageFlags, VK_FLAGS_NONE,
null, // no memory barriers
null, // no buffer memory barriers
imageMemoryBarrier); // one image memory barrier
}
private static class Swapchain {
long swapchainHandle;
long[] images;
long[] imageViews;
}
private static Swapchain createSwapChain(VkDevice device, VkPhysicalDevice physicalDevice, long surface, long oldSwapChain, VkCommandBuffer commandBuffer, int width,
int height, int colorFormat, int colorSpace) {
int err;
// Get physical device surface properties and formats
VkSurfaceCapabilitiesKHR surfCaps = VkSurfaceCapabilitiesKHR.callocStack();
err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, surfCaps);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to get physical device surface capabilities: " + translateVulkanResult(err));
}
IntBuffer pPresentModeCount = mallocStackInt(1);
err = vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, null);
int presentModeCount = pPresentModeCount.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to get number of physical device surface presentation modes: " + translateVulkanResult(err));
}
IntBuffer pPresentModes = mallocStackInt(presentModeCount);
err = vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to get physical device surface presentation modes: " + translateVulkanResult(err));
}
// Try to use mailbox mode. Low latency and non-tearing
int swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
for (int i = 0; i < presentModeCount; i++) {
if (pPresentModes.get(i) == VK_PRESENT_MODE_MAILBOX_KHR) {
swapchainPresentMode = VK_PRESENT_MODE_MAILBOX_KHR;
break;
}
if ((swapchainPresentMode != VK_PRESENT_MODE_MAILBOX_KHR) && (pPresentModes.get(i) == VK_PRESENT_MODE_IMMEDIATE_KHR)) {
swapchainPresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
}
}
// Determine the number of images
int desiredNumberOfSwapchainImages = surfCaps.minImageCount() + 1;
if ((surfCaps.maxImageCount() > 0) && (desiredNumberOfSwapchainImages > surfCaps.maxImageCount())) {
desiredNumberOfSwapchainImages = surfCaps.maxImageCount();
}
int preTransform;
if ((surfCaps.supportedTransforms() & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) != 0) {
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
} else {
preTransform = surfCaps.currentTransform();
}
VkSwapchainCreateInfoKHR swapchainCI = VkSwapchainCreateInfoKHR.callocStack()
.sType(VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR)
.pNext(NULL)
.surface(surface)
.minImageCount(desiredNumberOfSwapchainImages)
.imageFormat(colorFormat)
.imageColorSpace(colorSpace)
.imageUsage(VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
.preTransform(preTransform)
.imageArrayLayers(1)
.imageSharingMode(VK_SHARING_MODE_EXCLUSIVE)
.pQueueFamilyIndices(null)
.presentMode(swapchainPresentMode)
.oldSwapchain(oldSwapChain)
.clipped(VK_TRUE)
.compositeAlpha(VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR);
swapchainCI.imageExtent()
.width(width)
.height(height);
LongBuffer pSwapChain = mallocStackLong(1);
err = vkCreateSwapchainKHR(device, swapchainCI, null, pSwapChain);
long swapChain = pSwapChain.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create swap chain: " + translateVulkanResult(err));
}
// If we just re-created an existing swapchain, we should destroy the old swapchain at this point.
// Note: destroying the swapchain also cleans up all its associated presentable images once the platform is done with them.
if (oldSwapChain != VK_NULL_HANDLE) {
vkDestroySwapchainKHR(device, oldSwapChain, null);
}
IntBuffer pImageCount = mallocStackInt(1);
err = vkGetSwapchainImagesKHR(device, swapChain, pImageCount, null);
int imageCount = pImageCount.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to get number of swapchain images: " + translateVulkanResult(err));
}
LongBuffer pSwapchainImages = mallocStackLong(imageCount);
err = vkGetSwapchainImagesKHR(device, swapChain, pImageCount, pSwapchainImages);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to get swapchain images: " + translateVulkanResult(err));
}
long[] images = new long[imageCount];
long[] imageViews = new long[imageCount];
LongBuffer pBufferView = mallocStackLong(1);
VkImageViewCreateInfo colorAttachmentView = VkImageViewCreateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO)
.pNext(NULL)
.format(colorFormat)
.viewType(VK_IMAGE_VIEW_TYPE_2D)
.flags(VK_FLAGS_NONE);
colorAttachmentView.components()
.r(VK_COMPONENT_SWIZZLE_R)
.g(VK_COMPONENT_SWIZZLE_G)
.b(VK_COMPONENT_SWIZZLE_B)
.a(VK_COMPONENT_SWIZZLE_A);
colorAttachmentView.subresourceRange()
.aspectMask(VK_IMAGE_ASPECT_COLOR_BIT)
.baseMipLevel(0)
.levelCount(1)
.baseArrayLayer(0)
.layerCount(1);
for (int i = 0; i < imageCount; i++) {
images[i] = pSwapchainImages.get(i);
// Bring the image from an UNDEFINED state to the PRESENT_SRC state
imageBarrier(commandBuffer, images[i], VK_IMAGE_ASPECT_COLOR_BIT, VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR);
colorAttachmentView.image(images[i]);
err = vkCreateImageView(device, colorAttachmentView, null, pBufferView);
imageViews[i] = pBufferView.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create image view: " + translateVulkanResult(err));
}
}
Swapchain ret = new Swapchain();
ret.images = images;
ret.imageViews = imageViews;
ret.swapchainHandle = swapChain;
return ret;
}
private static long createClearRenderPass(VkDevice device, int colorFormat) {
VkAttachmentDescription.Buffer attachments = VkAttachmentDescription.callocStack(1)
.format(colorFormat)
.samples(VK_SAMPLE_COUNT_1_BIT)
.loadOp(VK_ATTACHMENT_LOAD_OP_CLEAR)
.storeOp(VK_ATTACHMENT_STORE_OP_STORE)
.stencilLoadOp(VK_ATTACHMENT_LOAD_OP_DONT_CARE)
.stencilStoreOp(VK_ATTACHMENT_STORE_OP_DONT_CARE)
.initialLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
.finalLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
VkAttachmentReference.Buffer colorReference = VkAttachmentReference.callocStack(1)
.attachment(0)
.layout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
VkSubpassDescription.Buffer subpass = VkSubpassDescription.callocStack(1)
.pipelineBindPoint(VK_PIPELINE_BIND_POINT_GRAPHICS)
.flags(VK_FLAGS_NONE)
.pInputAttachments(null)
.colorAttachmentCount(colorReference.remaining())
.pColorAttachments(colorReference)
.pResolveAttachments(null)
.pDepthStencilAttachment(null)
.pPreserveAttachments(null);
VkRenderPassCreateInfo renderPassInfo = VkRenderPassCreateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO)
.pNext(NULL)
.pAttachments(attachments)
.pSubpasses(subpass)
.pDependencies(null);
LongBuffer pRenderPass = mallocStackLong(1);
int err = vkCreateRenderPass(device, renderPassInfo, null, pRenderPass);
long renderPass = pRenderPass.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create clear render pass: " + translateVulkanResult(err));
}
return renderPass;
}
private static long[] createFramebuffers(VkDevice device, Swapchain swapchain, long renderPass, int width, int height) {
LongBuffer attachments = mallocStackLong(1);
VkFramebufferCreateInfo fci = VkFramebufferCreateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO)
.pAttachments(attachments)
.flags(VK_FLAGS_NONE)
.height(height)
.width(width)
.layers(1)
.pNext(NULL)
.renderPass(renderPass);
// Create a framebuffer for each swapchain image
long[] framebuffers = new long[swapchain.images.length];
LongBuffer pFramebuffer = mallocStackLong(1);
for (int i = 0; i < swapchain.images.length; i++) {
attachments.put(0, swapchain.imageViews[i]);
int err = vkCreateFramebuffer(device, fci, null, pFramebuffer);
long framebuffer = pFramebuffer.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create framebuffer: " + translateVulkanResult(err));
}
framebuffers[i] = framebuffer;
}
return framebuffers;
}
private static void submitCommandBuffer(VkQueue queue, VkCommandBuffer commandBuffer) {
if (commandBuffer == null || commandBuffer.address() == NULL)
return;
VkSubmitInfo submitInfo = VkSubmitInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_SUBMIT_INFO);
PointerBuffer pCommandBuffers = mallocStackPointer(1)
.put(commandBuffer)
.flip();
submitInfo.pCommandBuffers(pCommandBuffers);
int err = vkQueueSubmit(queue, submitInfo, VK_NULL_HANDLE);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to submit command buffer: " + translateVulkanResult(err));
}
}
private static VkCommandBuffer[] createRenderCommandBuffers(VkDevice device, long commandPool, long[] framebuffers, long renderPass, int width, int height) {
// Create the render command buffers (one command buffer per framebuffer image)
VkCommandBufferAllocateInfo cmdBufAllocateInfo = VkCommandBufferAllocateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO)
.commandPool(commandPool)
.level(VK_COMMAND_BUFFER_LEVEL_PRIMARY)
.commandBufferCount(framebuffers.length);
PointerBuffer pCommandBuffer = mallocStackPointer(framebuffers.length);
int err = vkAllocateCommandBuffers(device, cmdBufAllocateInfo, pCommandBuffer);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to allocate render command buffer: " + translateVulkanResult(err));
}
VkCommandBuffer[] renderCommandBuffers = new VkCommandBuffer[framebuffers.length];
for (int i = 0; i < framebuffers.length; i++) {
renderCommandBuffers[i] = new VkCommandBuffer(pCommandBuffer.get(i), device);
}
// Create the command buffer begin structure
VkCommandBufferBeginInfo cmdBufInfo = VkCommandBufferBeginInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO)
.pNext(NULL);
// Specify clear color (cornflower blue)
VkClearValue.Buffer clearValues = VkClearValue.callocStack(1);
clearValues.color()
.float32(0, 100/255.0f)
.float32(1, 149/255.0f)
.float32(2, 237/255.0f)
.float32(3, 1.0f);
// Specify everything to begin a render pass
VkRenderPassBeginInfo renderPassBeginInfo = VkRenderPassBeginInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO)
.pNext(NULL)
.renderPass(renderPass)
.pClearValues(clearValues);
VkRect2D renderArea = renderPassBeginInfo.renderArea();
renderArea.offset()
.x(0)
.y(0);
renderArea.extent()
.width(width)
.height(height);
for (int i = 0; i < renderCommandBuffers.length; ++i) {
// Set target frame buffer
renderPassBeginInfo.framebuffer(framebuffers[i]);
err = vkBeginCommandBuffer(renderCommandBuffers[i], cmdBufInfo);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to begin render command buffer: " + translateVulkanResult(err));
}
vkCmdBeginRenderPass(renderCommandBuffers[i], renderPassBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
// Update dynamic viewport state
VkViewport.Buffer viewport = VkViewport.callocStack(1)
.height(height)
.width(width)
.minDepth(0.0f)
.maxDepth(1.0f);
vkCmdSetViewport(renderCommandBuffers[i], 0, viewport);
// Update dynamic scissor state
VkRect2D.Buffer scissor = VkRect2D.callocStack(1);
scissor.extent()
.width(width)
.height(height);
scissor.offset()
.x(0)
.y(0);
vkCmdSetScissor(renderCommandBuffers[i], 0, scissor);
vkCmdEndRenderPass(renderCommandBuffers[i]);
// Add a present memory barrier to the end of the command buffer
// This will transform the frame buffer color attachment to a
// new layout for presenting it to the windowing system integration
VkImageMemoryBarrier.Buffer prePresentBarrier = createPrePresentBarrier(swapchain.images[i]);
vkCmdPipelineBarrier(renderCommandBuffers[i],
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
VK_FLAGS_NONE,
null, // No memory barriers
null, // No buffer memory barriers
prePresentBarrier); // One image memory barrier
prePresentBarrier.free();
err = vkEndCommandBuffer(renderCommandBuffers[i]);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to begin render command buffer: " + translateVulkanResult(err));
}
}
return renderCommandBuffers;
}
private static VkImageMemoryBarrier.Buffer createPrePresentBarrier(long presentImage) {
VkImageMemoryBarrier.Buffer imageMemoryBarrier = VkImageMemoryBarrier.calloc(1)
.sType(VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER)
.pNext(NULL)
.srcAccessMask(VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT)
.dstAccessMask(0)
.oldLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
.newLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
.srcQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
.dstQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED);
imageMemoryBarrier.subresourceRange()
.aspectMask(VK_IMAGE_ASPECT_COLOR_BIT)
.baseMipLevel(0)
.levelCount(1)
.baseArrayLayer(0)
.layerCount(1);
imageMemoryBarrier.image(presentImage);
return imageMemoryBarrier;
}
private static VkImageMemoryBarrier.Buffer createPostPresentBarrier(long presentImage) {
VkImageMemoryBarrier.Buffer imageMemoryBarrier = VkImageMemoryBarrier.calloc(1)
.sType(VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER)
.pNext(NULL)
.srcAccessMask(0)
.dstAccessMask(VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT)
.oldLayout(VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
.newLayout(VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
.srcQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED)
.dstQueueFamilyIndex(VK_QUEUE_FAMILY_IGNORED);
imageMemoryBarrier.subresourceRange()
.aspectMask(VK_IMAGE_ASPECT_COLOR_BIT)
.baseMipLevel(0)
.levelCount(1)
.baseArrayLayer(0)
.layerCount(1);
imageMemoryBarrier.image(presentImage);
return imageMemoryBarrier;
}
private static void submitPostPresentBarrier(long image, VkCommandBuffer commandBuffer, VkQueue queue) {
VkCommandBufferBeginInfo cmdBufInfo = VkCommandBufferBeginInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO)
.pNext(NULL);
int err = vkBeginCommandBuffer(commandBuffer, cmdBufInfo);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to begin command buffer: " + translateVulkanResult(err));
}
VkImageMemoryBarrier.Buffer postPresentBarrier = createPostPresentBarrier(image);
vkCmdPipelineBarrier(
commandBuffer,
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
VK_FLAGS_NONE,
null, // No memory barriers,
null, // No buffer barriers,
postPresentBarrier); // one image barrier
postPresentBarrier.free();
err = vkEndCommandBuffer(commandBuffer);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to wait for idle queue: " + translateVulkanResult(err));
}
// Submit the command buffer
submitCommandBuffer(queue, commandBuffer);
}
/*
* All resources that must be reallocated on window resize.
*/
private static Swapchain swapchain;
private static long[] framebuffers;
private static VkCommandBuffer[] renderCommandBuffers;
static {
/* Configure LWJGL stack. We don't even need a whole Kilobyte. */
Configuration.STACK_SIZE.set(1);
}
public static void main(String[] args) {
if (glfwInit() != GLFW_TRUE) {
throw new RuntimeException("Failed to initialize GLFW");
}
if (glfwVulkanSupported() == GLFW_FALSE) {
throw new AssertionError("GLFW failed to find the Vulkan loader");
}
/* Look for instance extensions */
PointerBuffer requiredExtensions = glfwGetRequiredInstanceExtensions();
if (requiredExtensions == null) {
throw new AssertionError("Failed to find list of required Vulkan extensions");
}
// Create the Vulkan instance
final VkInstance instance = createInstance(requiredExtensions);
final VkDebugReportCallbackEXT debugCallback = new VkDebugReportCallbackEXT() {
public int invoke(int flags, int objectType, long object, long location, int messageCode, long pLayerPrefix, long pMessage, long pUserData) {
System.err.println("ERROR OCCURED: " + memDecodeASCII(pMessage));
return 0;
}
};
final long debugCallbackHandle = setupDebugging(instance, VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT, debugCallback);
final VkPhysicalDevice physicalDevice = getFirstPhysicalDevice(instance);
final DeviceAndGraphicsQueueFamily deviceAndGraphicsQueueFamily = createDeviceAndGetGraphicsQueueFamily(physicalDevice);
final VkDevice device = deviceAndGraphicsQueueFamily.device;
int queueFamilyIndex = deviceAndGraphicsQueueFamily.queueFamilyIndex;
// Create GLFW window
glfwDefaultWindowHints();
glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API);
glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);
long window = glfwCreateWindow(800, 600, "GLFW Vulkan Demo", NULL, NULL);
GLFWKeyCallback keyCallback;
glfwSetKeyCallback(window, keyCallback = new GLFWKeyCallback() {
public void invoke(long window, int key, int scancode, int action, int mods) {
if (action != GLFW_RELEASE)
return;
if (key == GLFW_KEY_ESCAPE)
glfwSetWindowShouldClose(window, GLFW_TRUE);
}
});
LongBuffer pSurface = memAllocLong(1);
int err = glfwCreateWindowSurface(instance, window, null, pSurface);
final long surface = pSurface.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create surface: " + translateVulkanResult(err));
}
// Create static Vulkan resources
final ColorFormatAndSpace colorFormatAndSpace = getColorFormatAndSpace(physicalDevice, surface);
final long commandPool = createCommandPool(device, queueFamilyIndex);
final VkCommandBuffer setupCommandBuffer = createCommandBuffer(device, commandPool);
final VkCommandBuffer postPresentCommandBuffer = createCommandBuffer(device, commandPool);
final VkQueue queue = createDeviceQueue(device, queueFamilyIndex);
final long clearRenderPass = createClearRenderPass(device, colorFormatAndSpace.colorFormat);
final long renderCommandPool = createCommandPool(device, queueFamilyIndex);
final class SwapchainRecreator {
boolean mustRecreate = true;
int width;
int height;
void recreate() {
// Begin the setup command buffer (the one we will use for swapchain/framebuffer creation)
VkCommandBufferBeginInfo cmdBufInfo = VkCommandBufferBeginInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO)
.pNext(NULL);
int err = vkBeginCommandBuffer(setupCommandBuffer, cmdBufInfo);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to begin setup command buffer: " + translateVulkanResult(err));
}
long oldChain = swapchain != null ? swapchain.swapchainHandle : VK_NULL_HANDLE;
// Create the swapchain (this will also add a memory barrier to initialize the framebuffer images)
swapchain = createSwapChain(device, physicalDevice, surface, oldChain, setupCommandBuffer,
width, height, colorFormatAndSpace.colorFormat, colorFormatAndSpace.colorSpace);
err = vkEndCommandBuffer(setupCommandBuffer);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to end setup command buffer: " + translateVulkanResult(err));
}
submitCommandBuffer(queue, setupCommandBuffer);
vkQueueWaitIdle(queue);
if (framebuffers != null) {
for (int i = 0; i < framebuffers.length; i++)
vkDestroyFramebuffer(device, framebuffers[i], null);
}
framebuffers = createFramebuffers(device, swapchain, clearRenderPass, width, height);
// Create render command buffers
if (renderCommandBuffers != null) {
vkResetCommandPool(device, renderCommandPool, VK_FLAGS_NONE);
}
renderCommandBuffers = createRenderCommandBuffers(device, renderCommandPool, framebuffers, clearRenderPass, width, height);
mustRecreate = false;
}
}
final SwapchainRecreator swapchainRecreator = new SwapchainRecreator();
// Handle canvas resize
GLFWWindowSizeCallback windowSizeCallback = new GLFWWindowSizeCallback() {
public void invoke(long window, int width, int height) {
if (width <= 0 || height <= 0)
return;
swapchainRecreator.width = width;
swapchainRecreator.height = height;
swapchainRecreator.mustRecreate = true;
}
};
glfwSetWindowSizeCallback(window, windowSizeCallback);
glfwShowWindow(window);
// Pre-allocate everything needed in the render loop
IntBuffer pImageIndex = mallocStackInt(1);
int currentBuffer = 0;
PointerBuffer pCommandBuffers = mallocStackPointer(1);
LongBuffer pSwapchains = mallocStackLong(1);
LongBuffer pImageAcquiredSemaphore = mallocStackLong(1);
LongBuffer pRenderCompleteSemaphore = mallocStackLong(1);
// Info struct to create a semaphore
VkSemaphoreCreateInfo semaphoreCreateInfo = VkSemaphoreCreateInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO)
.pNext(NULL)
.flags(VK_FLAGS_NONE);
// Info struct to submit a command buffer which will wait on the semaphore
IntBuffer pWaitDstStageMask = mallocStackInt(1);
pWaitDstStageMask.put(0, VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT);
VkSubmitInfo submitInfo = VkSubmitInfo.callocStack()
.sType(VK_STRUCTURE_TYPE_SUBMIT_INFO)
.pNext(NULL)
.waitSemaphoreCount(pImageAcquiredSemaphore.remaining())
.pWaitSemaphores(pImageAcquiredSemaphore)
.pWaitDstStageMask(pWaitDstStageMask)
.pCommandBuffers(pCommandBuffers)
.pSignalSemaphores(pRenderCompleteSemaphore);
// Info struct to present the current swapchain image to the display
VkPresentInfoKHR presentInfo = VkPresentInfoKHR.callocStack()
.sType(VK_STRUCTURE_TYPE_PRESENT_INFO_KHR)
.pNext(NULL)
.pWaitSemaphores(pRenderCompleteSemaphore)
.swapchainCount(pSwapchains.remaining())
.pSwapchains(pSwapchains)
.pImageIndices(pImageIndex)
.pResults(null);
// The render loop
while (glfwWindowShouldClose(window) == GLFW_FALSE) {
// Handle window messages. Resize events happen exactly here.
// So it is safe to use the new swapchain images and framebuffers afterwards.
glfwPollEvents();
if (swapchainRecreator.mustRecreate)
swapchainRecreator.recreate();
// Create a semaphore to wait for the swapchain to acquire the next image
err = vkCreateSemaphore(device, semaphoreCreateInfo, null, pImageAcquiredSemaphore);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create image acquired semaphore: " + translateVulkanResult(err));
}
// Create a semaphore to wait for the render to complete, before presenting
err = vkCreateSemaphore(device, semaphoreCreateInfo, null, pRenderCompleteSemaphore);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to create render complete semaphore: " + translateVulkanResult(err));
}
// Get next image from the swap chain (back/front buffer).
// This will set up the imageAcquiredSemaphore to be signalled when the operation is complete
err = vkAcquireNextImageKHR(device, swapchain.swapchainHandle, UINT64_MAX, pImageAcquiredSemaphore.get(0), VK_NULL_HANDLE, pImageIndex);
currentBuffer = pImageIndex.get(0);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to acquire next swapchain image: " + translateVulkanResult(err));
}
// Select the command buffer for the current framebuffer image/attachment
pCommandBuffers.put(0, renderCommandBuffers[currentBuffer]);
// Submit to the graphics queue
err = vkQueueSubmit(queue, submitInfo, VK_NULL_HANDLE);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to submit render queue: " + translateVulkanResult(err));
}
// Present the current buffer to the swap chain
// This will display the image
pSwapchains.put(0, swapchain.swapchainHandle);
err = vkQueuePresentKHR(queue, presentInfo);
if (err != VK_SUCCESS) {
throw new AssertionError("Failed to present the swapchain image: " + translateVulkanResult(err));
}
// Destroy this semaphore (we will create a new one in the next frame)
vkDestroySemaphore(device, pImageAcquiredSemaphore.get(0), null);
vkDestroySemaphore(device, pRenderCompleteSemaphore.get(0), null);
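// Note: creating and destroying both semaphores every frame keeps this demo simple; a common
// alternative (not shown here) is to create them once before the loop and reuse them, which
// avoids the per-frame vkCreateSemaphore/vkDestroySemaphore calls.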
// Create and submit post present barrier
vkQueueWaitIdle(queue);
submitPostPresentBarrier(swapchain.images[currentBuffer], postPresentCommandBuffer, queue);
}
vkDestroyDebugReportCallbackEXT(instance, debugCallbackHandle, null);
windowSizeCallback.free();
keyCallback.free();
glfwDestroyWindow(window);
glfwTerminate();
// We don't bother disposing of all Vulkan resources.
// Let the OS process manager take care of it.
}
}
|
package com.telefonica.iot.cygnus.sinks;
import static org.junit.Assert.*; // required by "fail"-like assertions
import com.google.gson.JsonPrimitive;
import com.telefonica.iot.cygnus.aggregation.NGSIGenericAggregator;
import com.telefonica.iot.cygnus.containers.NotifyContextRequest;
import static com.telefonica.iot.cygnus.utils.CommonUtilsForTests.getTestTraceHead;
import com.telefonica.iot.cygnus.backends.sql.SQLQueryUtils;
import com.telefonica.iot.cygnus.errors.CygnusBadConfiguration;
import com.telefonica.iot.cygnus.interceptors.NGSIEvent;
import com.telefonica.iot.cygnus.utils.CommonConstants;
import com.telefonica.iot.cygnus.utils.NGSIConstants;
import com.telefonica.iot.cygnus.utils.NGSIUtils;
import org.apache.flume.Context;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
/**
*
* @author hermanjunge
*/
@RunWith(MockitoJUnitRunner.class)
public class NGSIPostgreSQLSinkTest {
/**
* Constructor.
*/
public NGSIPostgreSQLSinkTest() {
LogManager.getRootLogger().setLevel(Level.FATAL);
} // NGSIPostgreSQLSinkTest
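/*
 * All tests below follow the same pattern: build a Flume Context through the createContext(...)
 * helper (referenced but not shown in this excerpt; it is assumed to map each non-null argument
 * to the corresponding sink property), configure the sink with it, and then assert either on
 * getInvalidConfiguration() or on the names produced by buildDBName/buildSchemaName/buildTableName.
 */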
@Test
public void testConfigureEnableEncoding() {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = "falso";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
try {
assertTrue(sink.getInvalidConfiguration());
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- OK - 'enable_encoding=falso' was detected");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- FAIL - 'enable_encoding=falso' was not detected");
throw e;
} // try catch
} // testConfigureEnableEncoding
@Test
public void testConfigureEnableLowercase() {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = "falso";
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
try {
assertTrue(sink.getInvalidConfiguration());
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- OK - 'enable_lowercase=falso' was detected");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- FAIL - 'enable_lowercase=falso' was not detected");
throw e;
} // try catch
} // testConfigureEnableLowercase
@Test
public void testConfigureEnableGrouping() {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = null; // default
String enableGrouping = "falso";
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
try {
assertTrue(sink.getInvalidConfiguration());
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- OK - 'enable_grouping=falso' was detected");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- FAIL - 'enable_grouping=falso' was not detected");
throw e;
} // try catch
} // testConfigureEnableGrouping
// TBD: check for dataModel values in NGSIPostgreSQLSink and uncomment this test.
// @Test
public void testConfigureDataModel() {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-service";
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
try {
assertTrue(sink.getInvalidConfiguration());
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- OK - 'data_model=dm-by-service' was detected");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- FAIL - 'data_model=dm-by-service' was not detected");
throw e;
} // try catch
} // testConfigureDataModel
@Test
public void testConfigureAttrPersistence() {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "
String attrPersistence = "fila";
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
try {
assertTrue(sink.getInvalidConfiguration());
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- OK - 'attr_persistence=fila' was detected");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- FAIL - 'attr_persistence=fila' was not detected");
throw e;
} // try catch
} // testConfigureAttrPersistence
@Test
public void testConfigureSQLOptionsIsNull() {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "
String attrPersistence = null;
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
assertNull(sink.getPostgreSQLOptions());
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- OK - postgresqlOptions is null when it is not configured");
} // testConfigureSQLOptionsIsNull
@Test
public void testConfigureSQLOptionsHasValue() {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "
String attrPersistence = null;
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
String sqlOptions = "sslmode=require";
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache, sqlOptions));
assertEquals(sqlOptions, sink.getPostgreSQLOptions());
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- OK - postgresqlOptions has value when it is configured");
} // testConfigureSQLOptionsHasValue
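// Note: the two tests above use an overloaded createContext(...) taking an extra sqlOptions
// argument; it is assumed to set the sink's postgresql_options property (e.g. "sslmode=require"),
// which the sink is expected to pass through to the JDBC connection unchanged.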
@Test
public void testBuildSchemaNameOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildSchemaNameOldEncoding]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "someService";
String servicePath = "someServicePath";
try {
String builtSchemaName = sink.buildSchemaName(service, servicePath);
String expectedDBName = "someService";
try {
assertEquals(expectedDBName, builtSchemaName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildSchemaNameOldEncoding]")
+ "- OK - '" + expectedDBName + "' is equals to the encoding of <service>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildSchemaNameOldEncoding]")
+ "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildSchemaNameOldEncoding]")
+ "- FAIL - There was some problem when building the DB name");
throw e;
} // try catch
} // testBuildSchemaNameOldEncoding
@Test
public void testBuildSchemaNameNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildSchemaNameNewEncoding]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "someService";
String servicePath = "someServicePath";
try {
String builtSchemaName = sink.buildSchemaName(service, servicePath);
String expectedDBName = "somex0053ervice";
try {
assertEquals(expectedDBName, builtSchemaName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildSchemaNameNewEncoding]")
+ "- OK - '" + expectedDBName + "' is equals to the encoding of <service>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildSchemaNameNewEncoding]")
+ "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildSchemaNameNewEncoding]")
+ "- FAIL - There was some problem when building the DB name");
throw e;
} // try catch
} // testBuildSchemaNameNewEncoding
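// Note on the "new encoding" expectations: as the expected values in these tests show, with
// enable_encoding=true an upper-case letter is replaced by 'x' followed by its 4-digit Unicode
// code point, e.g. 'S' (U+0053) becomes "x0053", so "someService" encodes to "somex0053ervice".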
@Test
public void testBuildDBNameOldEncodingDatabaseDataModel() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncodingDatabaseDataModel]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-database"; // default
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "someService";
try {
String builtSchemaName = sink.buildDBName(service);
String expectedDBName = "someService";
try {
assertEquals(expectedDBName, builtSchemaName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncodingDatabaseDataModel]")
+ "- OK - '" + expectedDBName + "' is equals to the encoding of <service>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncodingDatabaseDataModel]")
+ "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncodingDatabaseDataModel]")
+ "- FAIL - There was some problem when building the Schema name");
throw e;
} // try catch
} // testBuildDBNameOldEncodingDatabaseDataModel
@Test
public void testBuildDBNameOldEncodingEntityTypeDatabaseDataModel() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncodingEntityTypeDatabaseDataModel]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-type-database"; // default
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "someService";
try {
String builtSchemaName = sink.buildDBName(service);
String expectedDBName = "someService";
try {
assertEquals(expectedDBName, builtSchemaName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncodingEntityTypeDatabaseDataModel]")
+ "- OK - '" + expectedDBName + "' is equals to the encoding of <service>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncodingEntityTypeDatabaseDataModel]")
+ "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncodingEntityTypeDatabaseDataModel]")
+ "- FAIL - There was some problem when building the Schema name");
throw e;
} // try catch
} // testBuildDBNameOldEncodingEntityTypeDatabaseDataModel
@Test
public void testBuildDBNameOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncoding]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "someService";
try {
String builtSchemaName = sink.buildDBName(service);
// The default value for the DB name
String expectedDBName = "postgres";
try {
assertEquals(expectedDBName, builtSchemaName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncoding]")
+ "- OK - '" + expectedDBName + "' is equals to the encoding of <service>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncoding]")
+ "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameOldEncoding]")
+ "- FAIL - There was some problem when building the Schema name");
throw e;
} // try catch
} // testBuildDBNameOldEncoding
@Test
public void testBuildDBNameNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncoding]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "someService";
try {
String builtSchemaName = sink.buildDBName(service);
String expectedDBName = "postgres";
try {
assertEquals(expectedDBName, builtSchemaName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncoding]")
+ "- OK - '" + expectedDBName + "' is equals to the encoding of <service>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncoding]")
+ "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncoding]")
+ "- FAIL - There was some problem when building the DB name");
throw e;
} // try catch
} // testBuildDBNameNewEncoding
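// Note: with the default data model the DB name does not depend on <service> at all; both the
// old- and new-encoding tests above expect PostgreSQL's default database name, "postgres".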
@Test
public void testBuildDBNameNewEncodingDatabaseDataModel() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncodingDatabaseDataModel]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-database-schema"; // default
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "someService";
try {
String builtSchemaName = sink.buildDBName(service);
String expectedDBName = "somex0053ervice";
try {
assertEquals(expectedDBName, builtSchemaName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncodingDatabaseDataModel]")
+ "- OK - '" + expectedDBName + "' is equals to the encoding of <service>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncodingDatabaseDataModel]")
+ "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncodingDatabaseDataModel]")
+ "- FAIL - There was some problem when building the DB name");
throw e;
} // try catch
} // testBuildDBNameNewEncodingDatabaseDataModel
@Test
public void testBuildDBNameNewEncodingEntityTypeDatabaseDataModel() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncodingEntityTypeDatabaseDataModel]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-type-database-schema"; // default
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "someService";
try {
String builtSchemaName = sink.buildDBName(service);
String expectedDBName = "somex0053ervice";
try {
assertEquals(expectedDBName, builtSchemaName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncodingEntityTypeDatabaseDataModel]")
+ "- OK - '" + expectedDBName + "' is equals to the encoding of <service>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncodingEntityTypeDatabaseDataModel]")
+ "- FAIL - '" + expectedDBName + "' is not equals to the encoding of <service>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testBuildDBNameNewEncodingEntityTypeDatabaseDataModel]")
+ "- FAIL - There was some problem when building the DB name");
throw e;
} // try catch
} // testBuildDBNameNewEncodingEntityTypeDatabaseDataModel
@Test
public void testBuildTableNameNonRootServicePathDataModelByServicePathOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-service-path' the PostgreSQL table name is the encoding of <service-path>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-service-path";
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/somePath";
String entity = null; // irrelevant for this test
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "somePath";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameNonRootServicePathDataModelByServicePathOldEncoding
@Test
public void testBuildTableNameNonRootServicePathDataModelByServicePathNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-service-path' the PostgreSQL table name is the encoding of <service-path>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-service-path";
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/somePath";
String entity = null; // irrelevant for this test
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "x002fsomex0050ath";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameNonRootServicePathDataModelByServicePathNewEncoding
@Test
public void testBuildTableNameNonRootServicePathDataModelByEntityOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-service-path' the PostgreSQL table name is the encoding of the concatenation of <service-path>, "
+ "<entityId> and <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity";
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/somePath";
String entityType = null; // irrelevant for this test
String entity = "someId=someType";
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "somePath_someId_someType";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>, <entityId> "
+ "and <entityType>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>, "
+ "<entityId> and <entityType>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameNonRootServicePathDataModelByEntityOldEncoding
@Test
public void testBuildTableNameNonRootServicePathDataModelByEntityNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-service-path' the PostgreSQL table name is the encoding of the concatenation of <service-path>, "
+ "<entityId> and <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity";
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/somePath";
String entity = "someId=someType";
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "x002fsomex0050athxffffsomex0049dxffffsomex0054ype";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>, <entityId> "
+ "and <entityType>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>, "
+ "<entityId> and <entityType>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameNonRootServicePathDataModelByEntityNewEncoding
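// Note: the new encoding applies the same scheme to table names: '/' (U+002F) becomes "x002f"
// and the separator between <service-path>, <entityId> and <entityType> is rendered as "xffff",
// which yields the expected "x002fsomex0050athxffffsomex0049dxffffsomex0054ype" above.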
@Test
public void testBuildTableNameNonRootServicePathDataModelByEntityTypeOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-entity-type' the PostgreSQL table name is the encoding of the concatenation of <service-path>, "
+ "<entityId> and <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-type";
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/somePath";
String entity = "someId=someType";
String entityType = "someType"; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "somePath_someType";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>, <entityId> "
+ "and <entityType>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>, "
+ "<entityId> and <entityType>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameNonRootServicePathDataModelByEntityTypeOldEncoding
@Test
public void testBuildTableNameNonRootServicePathDataModelByEntityTypeNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-entity-type' the PostgreSQL table name is the encoding of the concatenation of <service-path>, "
+ "<entityId> and <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-type";
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/somePath";
String entity = "someId=someType";
String entityType = "someType"; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "x002fsomePathxffffsomeType";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>, <entityId> "
+ "and <entityType>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>, "
+ "<entityId> and <entityType>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameNonRootServicePathDataModelByEntityTypeNewEncoding
// NEW
@Test
public void testBuildTableNameNonRootServicePathDataModelByFixedEntityTypeOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-fixed-entity-type' the PostgreSQL table name is the encoding of <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-fixed-entity-type";
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/somePath";
String entity = "someId=someType";
String entityType = "someType"; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "someType";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <entityType>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <entityType>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameNonRootServicePathDataModelByFixedEntityTypeOldEncoding
@Test
public void testBuildTableNameNonRootServicePathDataModelByFixedEntityTypeNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-fixed-entity-type' the PostgreSQL table name is the encoding of <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-fixed-entity-type";
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/somePath";
String entity = "someId=someType";
String entityType = "someType"; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "somex0054ype";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <entityType>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <entityType>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameNonRootServicePathDataModelByFixedEntityTypeNewEncoding
// NEW END
@Test
public void testBuildTableNameRootServicePathDataModelByServicePathOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-service-path' the PostgreSQL table name cannot be built");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-service-path";
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/";
String entity = null; // irrelevant for this test
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
sink.buildTableName(servicePath, entity, entityType, attribute);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - The root service path was not detected as not valid");
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - The root service path was detected as not valid");
} // try catch
} // testBuildTableNameRootServicePathDataModelByServicePathOldEncoding
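// Note: with old encoding the root service path "/" is rejected (an exception is expected),
// whereas the next test shows that with new encoding it is accepted and encoded as "x002f".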
@Test
public void testBuildTableNameRootServicePathDataModelByServicePathNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-service-path' the PostgreSQL table name is the encoding of <service-path>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-service-path";
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/";
String entity = null; // irrelevant for this test
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "x002f";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameRootServicePathDataModelByServicePathNewEncoding
@Test
public void testBuildTableNameRootServicePathDataModelByEntityOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-service-path' the PostgreSQL table name is the encoding of the concatenation of <service-path>, "
+ "<entityId> and <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity";
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/";
String entity = "someId=someType";
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "someId_someType";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameRootServicePathDataModelByEntityOldencoding
@Test
public void testBuildTableNameRootServicePathDataModelByEntityNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-service-path' the PostgreSQL table name is the encoding of the concatenation of <service-path>, "
+ "<entityId> and <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity";
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/";
String entity = "someId=someType";
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "x002fxffffsomex0049dxffffsomex0054ype";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameRootServicePathDataModelByEntityNewEncoding
@Test
public void testBuildTableNameRootServicePathDataModelByEntityTypeOldEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-entity-type' the PostgreSQL table name is the encoding of the concatenation of <service-path>, "
+ "<entityId> and <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-type";
String enableEncoding = "false";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/";
String entity = "someId=someType";
String entityType = "someType"; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "someType";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameRootServicePathDataModelByEntityTypeOldEncoding
@Test
public void testBuildTableNameRootServicePathDataModelByEntityTypeNewEncoding() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "'dm-by-entity-type' the PostgreSQL table name is the encoding of the concatenation of <service-path>, "
+ "<entityId> and <entityType>");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-type";
String enableEncoding = "true";
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/";
String entity = "someId=someType";
String entityType = "someType"; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
String builtTableName = sink.buildTableName(servicePath, entity, entityType, attribute);
String expecetedTableName = "x002fxffffsomeType";
try {
assertEquals(expecetedTableName, builtTableName);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - '" + builtTableName + "' is equals to the encoding of <service-path>");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - '" + builtTableName + "' is not equals to the encoding of <service-path>");
throw e;
} // try catch
} catch (Exception e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - There was some problem when building the table name");
throw e;
} // try catch
} // testBuildTableNameRootServicePathDataModelByEntityTypeNewEncoding
@Test
public void testBuildSchemaNameLength() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildSchemaName]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String service = "tooLooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooongService";
String servicePath = "someServicePath";
try {
sink.buildSchemaName(service, servicePath);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildSchemaName]")
+ "- FAIL - A schema name length greater than 63 characters has not been detected");
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildSchemaName]")
+ "- OK - A schema name length greater than 63 characters has been detected");
} // try catch
} // testBuildSchemaNameLength
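// The 63-character limit exercised by the length tests below matches PostgreSQL's maximum
// identifier length (NAMEDATALEN - 1 = 63 bytes by default); longer schema or table names would
// be silently truncated by PostgreSQL, so the sink is expected to reject them with an exception.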
@Test
public void testBuildTableNameLengthDataModelByServicePath() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
+ "detected");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-service-path";
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/tooLooooooooooooooooooooooooooooooooooooooooooooooooooooooongServicePath";
String entity = null; // irrelevant for this test
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
sink.buildTableName(servicePath, entity, entityType, attribute);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - A table name length greater than 63 characters has not been detected");
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - A table name length greater than 63 characters has been detected");
} // try catch
} // testBuildTableNameLengthDataModelByServicePath
@Test
public void testBuildTableNameLengthDataModelByEntity() throws Exception {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity";
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/tooLooooooooooooooooooooongServicePath";
String entity = "tooLooooooooooooooooooooooooooongEntity";
String entityType = null; // irrelevant for this test
String attribute = null; // irrelevant for this test
try {
sink.buildTableName(servicePath, entity, entityType, attribute);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - A table name length greater than 63 characters has not been detected");
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - A table name length greater than 63 characters has been detected");
} // try catch
} // testBuildTableNameLengthDataModelByEntity
@Test
public void testBuildTableNameLengthDataModelByEntityType() throws Exception {
        System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
                + "-------- When data model is by entity type, a table name length greater than 63 characters is "
                + "detected");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-entity-type";
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/tooLooooooooooooooooooooongServicePath";
String entity = "tooLooooooooooooooooooooooooooongEntity";
        String entityType = "tooLooooooooooooooooooooooooooongEntityType"; // relevant for the dm-by-entity-type data model
String attribute = null; // irrelevant for this test
try {
sink.buildTableName(servicePath, entity, entityType, attribute);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - A table name length greater than 63 characters has not been detected");
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - A table name length greater than 63 characters has been detected");
} // try catch
} // testBuildTableNameLengthDataModelByEntityType
@Test
public void testBuildTableNameLengthDataModelByAttribute() throws Exception {
        System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
                + "-------- When data model is by attribute, a table name length greater than 63 characters is "
                + "detected");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = "dm-by-attribute";
String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = null; // default
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
String servicePath = "/tooLooooooooooooooongServicePath";
String entity = "tooLooooooooooooooooooongEntity";
String entityType = null; // irrelevant for this test
String attribute = "tooLooooooooooooongAttribute";
try {
sink.buildTableName(servicePath, entity, entityType, attribute);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- FAIL - A table name length greater than 63 characters has not been detected");
assertTrue(false);
} catch (Exception e) {
assertTrue(true);
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.buildTableName]")
+ "- OK - A table name length greater than 63 characters has been detected");
} // try catch
} // testBuildTableNameLengthDataModelByAttribute
@Test
public void testConfigureCache() {
        System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
                + "-------- Configured 'backend.enable_cache' cannot be different than 'true' or 'false'");
String attrPersistence = null; // default
String batchSize = null; // default
String batchTime = null; // default
String batchTTL = null; // default
String dataModel = null; // default
        String enableEncoding = null; // default
String enableGrouping = null; // default
String enableLowercase = null; // default
String host = null; // default
String password = null; // default
String port = null; // default
String username = null; // default
String cache = "falso";
NGSIPostgreSQLSink sink = new NGSIPostgreSQLSink();
sink.configure(createContext(attrPersistence, batchSize, batchTime, batchTTL, dataModel, enableEncoding,
enableGrouping, enableLowercase, host, password, port, username, cache));
try {
assertTrue(sink.getInvalidConfiguration());
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- OK - 'enable_cache=falso' was detected");
} catch (AssertionError e) {
System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.configure]")
+ "- FAIL - 'enable_cache=falso' was not detected");
throw e;
} // try catch
    } // testConfigureCache
private Context createContext(String attrPersistence, String batchSize, String batchTime, String batchTTL,
String dataModel, String enableEncoding, String enableGrouping, String enableLowercase, String host,
String password, String port, String username, String cache) {
Context context = new Context();
context.put("attr_persistence", attrPersistence);
context.put("batch_size", batchSize);
context.put("batch_time", batchTime);
context.put("batch_ttl", batchTTL);
context.put("data_model", dataModel);
context.put("enable_encoding", enableEncoding);
context.put("enable_grouping", enableGrouping);
context.put("enable_lowercase", enableLowercase);
context.put("postgresql_host", host);
context.put("postgresql_password", password);
context.put("postgresql_port", port);
context.put("postgresql_username", username);
context.put("backend.enable_cache", cache);
return context;
} // createContext
private Context createContext(String attrPersistence, String batchSize, String batchTime, String batchTTL,
String dataModel, String enableEncoding, String enableGrouping, String enableLowercase, String host,
String password, String port, String username, String cache, String sqlOptions) {
Context context = new Context();
context.put("attr_persistence", attrPersistence);
context.put("batch_size", batchSize);
context.put("batch_time", batchTime);
context.put("batch_ttl", batchTTL);
context.put("data_model", dataModel);
context.put("enable_encoding", enableEncoding);
context.put("enable_grouping", enableGrouping);
context.put("enable_lowercase", enableLowercase);
context.put("postgresql_host", host);
context.put("postgresql_password", password);
context.put("postgresql_port", port);
context.put("postgresql_username", username);
context.put("backend.enable_cache", cache);
context.put("postgresql_options", sqlOptions);
return context;
} // createContext
private Context createContextforNativeTypes(String attrPersistence, String batchSize, String batchTime, String batchTTL,
String dataModel, String enableEncoding, String enableGrouping, String enableLowercase, String host,
String password, String port, String username, String cache, String attrNativeTypes) {
Context context = new Context();
context.put("attr_persistence", attrPersistence);
context.put("batch_size", batchSize);
context.put("batch_time", batchTime);
context.put("batch_ttl", batchTTL);
context.put("data_model", dataModel);
context.put("enable_encoding", enableEncoding);
context.put("enable_grouping", enableGrouping);
context.put("enable_lowercase", enableLowercase);
context.put("postgresql_host", host);
context.put("postgresql_password", password);
context.put("postgresql_port", port);
context.put("postgresql_username", username);
context.put("backend.enable_cache", cache);
context.put("attr_native_types", attrNativeTypes);
return context;
} // createContextforNativeTypes
private NotifyContextRequest.ContextElement createContextElementForNativeTypes() {
NotifyContextRequest notifyContextRequest = new NotifyContextRequest();
NotifyContextRequest.ContextMetadata contextMetadata = new NotifyContextRequest.ContextMetadata();
contextMetadata.setName("someString");
contextMetadata.setType("string");
ArrayList<NotifyContextRequest.ContextMetadata> metadata = new ArrayList<>();
metadata.add(contextMetadata);
NotifyContextRequest.ContextAttribute contextAttribute1 = new NotifyContextRequest.ContextAttribute();
contextAttribute1.setName("someNumber");
contextAttribute1.setType("number");
contextAttribute1.setContextValue(new JsonPrimitive(2));
contextAttribute1.setContextMetadata(null);
NotifyContextRequest.ContextAttribute contextAttribute2 = new NotifyContextRequest.ContextAttribute();
contextAttribute2.setName("somneBoolean");
contextAttribute2.setType("Boolean");
contextAttribute2.setContextValue(new JsonPrimitive(true));
contextAttribute2.setContextMetadata(null);
NotifyContextRequest.ContextAttribute contextAttribute3 = new NotifyContextRequest.ContextAttribute();
contextAttribute3.setName("someDate");
contextAttribute3.setType("DateTime");
contextAttribute3.setContextValue(new JsonPrimitive("2016-09-21T01:23:00.00Z"));
contextAttribute3.setContextMetadata(null);
NotifyContextRequest.ContextAttribute contextAttribute4 = new NotifyContextRequest.ContextAttribute();
contextAttribute4.setName("someGeoJson");
contextAttribute4.setType("geo:json");
contextAttribute4.setContextValue(new JsonPrimitive("{\"type\": \"Point\",\"coordinates\": [-0.036177,39.986159]}"));
contextAttribute4.setContextMetadata(null);
NotifyContextRequest.ContextAttribute contextAttribute5 = new NotifyContextRequest.ContextAttribute();
contextAttribute5.setName("someJson");
contextAttribute5.setType("json");
contextAttribute5.setContextValue(new JsonPrimitive("{\"String\": \"string\"}"));
contextAttribute5.setContextMetadata(null);
NotifyContextRequest.ContextAttribute contextAttribute6 = new NotifyContextRequest.ContextAttribute();
contextAttribute6.setName("someString");
contextAttribute6.setType("string");
contextAttribute6.setContextValue(new JsonPrimitive("foo"));
contextAttribute6.setContextMetadata(null);
NotifyContextRequest.ContextAttribute contextAttribute7 = new NotifyContextRequest.ContextAttribute();
contextAttribute7.setName("someString2");
contextAttribute7.setType("string");
contextAttribute7.setContextValue(new JsonPrimitive(""));
contextAttribute7.setContextMetadata(null);
ArrayList<NotifyContextRequest.ContextAttribute> attributes = new ArrayList<>();
attributes.add(contextAttribute1);
attributes.add(contextAttribute2);
attributes.add(contextAttribute3);
attributes.add(contextAttribute4);
attributes.add(contextAttribute5);
attributes.add(contextAttribute6);
attributes.add(contextAttribute7);
NotifyContextRequest.ContextElement contextElement = new NotifyContextRequest.ContextElement();
contextElement.setId("someId");
contextElement.setType("someType");
contextElement.setIsPattern("false");
contextElement.setAttributes(attributes);
return contextElement;
} // createContextElementForNativeTypes
@Test
    public void testNativeTypeColumnBatch() throws CygnusBadConfiguration {
String attr_native_types = "true"; // default
NGSIPostgreSQLSink ngsiPostgreSQLSink = new NGSIPostgreSQLSink();
ngsiPostgreSQLSink.configure(createContextforNativeTypes("column", null, null, null, null, null, null, null, null, null, null, null, null, attr_native_types));
// Create a NGSIEvent
String timestamp = "1461136795801";
String correlatorId = "123456789";
String transactionId = "123456789";
String originalService = "someService";
String originalServicePath = "somePath";
String mappedService = "newService";
String mappedServicePath = "newPath";
String destination = "someDestination";
Map<String, String> headers = new HashMap<>();
headers.put(NGSIConstants.FLUME_HEADER_TIMESTAMP, timestamp);
headers.put(CommonConstants.HEADER_CORRELATOR_ID, correlatorId);
headers.put(NGSIConstants.FLUME_HEADER_TRANSACTION_ID, transactionId);
headers.put(CommonConstants.HEADER_FIWARE_SERVICE, originalService);
headers.put(CommonConstants.HEADER_FIWARE_SERVICE_PATH, originalServicePath);
headers.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE, mappedService);
headers.put(NGSIConstants.FLUME_HEADER_MAPPED_SERVICE_PATH, mappedServicePath);
NotifyContextRequest.ContextElement contextElement = createContextElementForNativeTypes();
NotifyContextRequest.ContextElement contextElement2 = createContextElement();
NGSIEvent ngsiEvent = new NGSIEvent(headers, contextElement.toString().getBytes(), contextElement, null);
NGSIEvent ngsiEvent2 = new NGSIEvent(headers, contextElement2.toString().getBytes(), contextElement2, null);
NGSIBatch batch = new NGSIBatch();
batch.addEvent(destination, ngsiEvent);
batch.addEvent(destination, ngsiEvent2);
try {
batch.startIterator();
while (batch.hasNext()) {
destination = batch.getNextDestination();
ArrayList<NGSIEvent> events = batch.getNextEvents();
NGSIGenericAggregator aggregator = ngsiPostgreSQLSink.getAggregator(false);
aggregator.setService(events.get(0).getServiceForNaming(false));
aggregator.setServicePathForData(events.get(0).getServicePathForData());
aggregator.setServicePathForNaming(events.get(0).getServicePathForNaming(false, false));
aggregator.setEntityForNaming(events.get(0).getEntityForNaming(false, false, false));
aggregator.setEntityType(events.get(0).getEntityTypeForNaming(false, false));
aggregator.setAttribute(events.get(0).getAttributeForNaming(false));
aggregator.setSchemeName(ngsiPostgreSQLSink.buildSchemaName(aggregator.getService(), aggregator.getServicePathForNaming()));
aggregator.setTableName(ngsiPostgreSQLSink.buildTableName(aggregator.getServicePathForNaming(), aggregator.getEntityForNaming(), aggregator.getEntityType(), aggregator.getAttribute()));
aggregator.setAttrNativeTypes(true);
aggregator.setAttrMetadataStore(true);
aggregator.setEnableNameMappings(true);
aggregator.setLastDataMode("insert");
aggregator.initialize(events.get(0));
for (NGSIEvent event : events) {
aggregator.aggregate(event);
}
String correctBatch = "('2016-04-20 07:19:55.801','somePath','someId','someType',2,'[]',TRUE,'[]','2016-09-21T01:23:00.00Z','[]','{\"type\": \"Point\",\"coordinates\": [-0.036177,39.986159]}','[]','{\"String\": \"string\"}','[]','foo','[]','','[]',NULL,NULL,NULL,NULL),('2016-04-20 07:19:55.801','somePath','someId','someType',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,'-3.7167, 40.3833','[{\"name\":\"location\",\"type\":\"string\",\"value\":\"WGS84\"}]','someValue2','[]')";
String valuesForInsert = SQLQueryUtils.getValuesForInsert(aggregator.getAggregationToPersist(), aggregator.isAttrNativeTypes());
if (valuesForInsert.equals(correctBatch)) {
                        System.out.println(getTestTraceHead("[NGSIPostgreSQLSink.testNativeTypeColumnBatch]")
+ "- OK - NativeTypesOK");
assertTrue(true);
} else {
assertFalse(true);
}
}
} catch (Exception e) {
System.out.println(e);
assertFalse(true);
}
}
private NotifyContextRequest.ContextElement createContextElement() {
NotifyContextRequest notifyContextRequest = new NotifyContextRequest();
NotifyContextRequest.ContextMetadata contextMetadata = new NotifyContextRequest.ContextMetadata();
contextMetadata.setName("location");
contextMetadata.setType("string");
contextMetadata.setContextMetadata(new JsonPrimitive("WGS84"));
ArrayList<NotifyContextRequest.ContextMetadata> metadata = new ArrayList<>();
metadata.add(contextMetadata);
NotifyContextRequest.ContextAttribute contextAttribute1 = new NotifyContextRequest.ContextAttribute();
contextAttribute1.setName("someName1");
contextAttribute1.setType("geo:point");
contextAttribute1.setContextValue(new JsonPrimitive("-3.7167, 40.3833"));
contextAttribute1.setContextMetadata(metadata);
NotifyContextRequest.ContextAttribute contextAttribute2 = new NotifyContextRequest.ContextAttribute();
contextAttribute2.setName("someName2");
contextAttribute2.setType("someType2");
contextAttribute2.setContextValue(new JsonPrimitive("someValue2"));
contextAttribute2.setContextMetadata(null);
ArrayList<NotifyContextRequest.ContextAttribute> attributes = new ArrayList<>();
attributes.add(contextAttribute1);
attributes.add(contextAttribute2);
NotifyContextRequest.ContextElement contextElement = new NotifyContextRequest.ContextElement();
contextElement.setId("someId");
contextElement.setType("someType");
contextElement.setIsPattern("false");
contextElement.setAttributes(attributes);
return contextElement;
} // createContextElement
} // NGSIPostgreSQLSinkTest
|
package com.xpn.xwiki.plugin.ldap;
import java.io.UnsupportedEncodingException;
import java.security.Security;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.List;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.novell.ldap.LDAPAttribute;
import com.novell.ldap.LDAPAttributeSet;
import com.novell.ldap.LDAPConnection;
import com.novell.ldap.LDAPConstraints;
import com.novell.ldap.LDAPDN;
import com.novell.ldap.LDAPEntry;
import com.novell.ldap.LDAPException;
import com.novell.ldap.LDAPJSSESecureSocketFactory;
import com.novell.ldap.LDAPSearchResults;
import com.novell.ldap.LDAPSocketFactory;
import com.xpn.xwiki.XWikiContext;
/**
* LDAP communication tool.
*
* @version $Id$
* @since 1.3 M2
*/
public class XWikiLDAPConnection
{
/**
* Logging tool.
*/
private static final Logger LOGGER = LoggerFactory.getLogger(XWikiLDAPConnection.class);
/**
* The LDAP connection.
*/
private LDAPConnection connection;
/**
* @return the {@link LDAPConnection}.
*/
public LDAPConnection getConnection()
{
return connection;
}
/**
* Open a LDAP connection.
*
* @param ldapUserName the user name to connect to LDAP server.
* @param password the password to connect to LDAP server.
* @param context the XWiki context.
     * @return true if the connection succeeds, false otherwise.
* @throws XWikiLDAPException error when trying to open connection.
*/
public boolean open(String ldapUserName, String password, XWikiContext context) throws XWikiLDAPException
{
XWikiLDAPConfig config = XWikiLDAPConfig.getInstance();
// open LDAP
int ldapPort = config.getLDAPPort(context);
String ldapHost = config.getLDAPParam("ldap_server", "localhost", context);
// allow to use the given user and password also as the LDAP bind user and password
String bindDN = config.getLDAPBindDN(ldapUserName, password, context);
String bindPassword = config.getLDAPBindPassword(ldapUserName, password, context);
boolean bind;
if ("1".equals(config.getLDAPParam("ldap_ssl", "0", context))) {
String keyStore = config.getLDAPParam("ldap_ssl.keystore", "", context);
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Connecting to LDAP using SSL");
}
bind = open(ldapHost, ldapPort, bindDN, bindPassword, keyStore, true, context);
} else {
bind = open(ldapHost, ldapPort, bindDN, bindPassword, null, false, context);
}
return bind;
}
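    /*
     * A hedged usage sketch (not part of the original class): how a caller might open a
     * connection with the XWiki-configured LDAP parameters and make sure it is closed again.
     * The credentials and the surrounding XWikiContext instance ("context") are illustrative
     * assumptions.
     *
     *     XWikiLDAPConnection ldap = new XWikiLDAPConnection();
     *     try {
     *         if (ldap.open("jdoe", "secret", context)) {
     *             // ... run searches, check passwords, etc.
     *         }
     *     } catch (XWikiLDAPException e) {
     *         // connection or bind failed
     *     } finally {
     *         ldap.close();
     *     }
     */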
/**
* Open LDAP connection.
*
* @param ldapHost the host of the server to connect to.
* @param ldapPort the port of the server to connect to.
* @param loginDN the user DN to connect to LDAP server.
* @param password the password to connect to LDAP server.
* @param pathToKeys the path to SSL keystore to use.
* @param ssl if true connect using SSL.
* @param context the XWiki context.
     * @return true if the connection succeeds, false otherwise.
* @throws XWikiLDAPException error when trying to open connection.
*/
public boolean open(String ldapHost, int ldapPort, String loginDN, String password, String pathToKeys, boolean ssl,
XWikiContext context) throws XWikiLDAPException
{
int port = ldapPort;
if (port <= 0) {
port = ssl ? LDAPConnection.DEFAULT_SSL_PORT : LDAPConnection.DEFAULT_PORT;
}
try {
if (ssl) {
XWikiLDAPConfig config = XWikiLDAPConfig.getInstance();
// Dynamically set JSSE as a security provider
Security.addProvider(config.getSecureProvider(context));
if (pathToKeys != null && pathToKeys.length() > 0) {
// Dynamically set the property that JSSE uses to identify
// the keystore that holds trusted root certificates
System.setProperty("javax.net.ssl.trustStore", pathToKeys);
// obviously unnecessary: sun default pwd = "changeit"
// System.setProperty("javax.net.ssl.trustStorePassword", sslpwd);
}
LDAPSocketFactory ssf = new LDAPJSSESecureSocketFactory();
// Set the socket factory as the default for all future connections
// LDAPConnection.setSocketFactory(ssf);
// Note: the socket factory can also be passed in as a parameter
// to the constructor to set it for this connection only.
this.connection = new LDAPConnection(ssf);
} else {
this.connection = new LDAPConnection();
}
// connect
connect(ldapHost, port);
// set referral following
LDAPConstraints constraints = this.connection.getConstraints();
constraints.setTimeLimit(1000);
constraints.setReferralFollowing(true);
constraints.setReferralHandler(new LDAPPluginReferralHandler(loginDN, password, context));
this.connection.setConstraints(constraints);
// bind
bind(loginDN, password);
} catch (UnsupportedEncodingException e) {
throw new XWikiLDAPException("LDAP bind failed with UnsupportedEncodingException.", e);
} catch (LDAPException e) {
throw new XWikiLDAPException("LDAP bind failed with LDAPException.", e);
}
return true;
}
/**
* Connect to server.
*
* @param ldapHost the host of the server to connect to.
* @param port the port of the server to connect to.
* @throws LDAPException error when trying to connect.
*/
private void connect(String ldapHost, int port) throws LDAPException
{
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Connection to LDAP server [" + ldapHost + ":" + port + "]");
}
// connect to the server
this.connection.connect(ldapHost, port);
}
/**
* Bind to LDAP server.
*
* @param loginDN the user DN to connect to LDAP server.
* @param password the password to connect to LDAP server.
     * @throws UnsupportedEncodingException error when encoding the provided password as UTF-8.
* @throws LDAPException error when trying to bind.
*/
public void bind(String loginDN, String password) throws UnsupportedEncodingException, LDAPException
{
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Binding to LDAP server with credentials login=[" + loginDN + "]");
}
// authenticate to the server
this.connection.bind(LDAPConnection.LDAP_V3, loginDN, password.getBytes("UTF8"));
}
/**
* Close LDAP connection.
*/
public void close()
{
try {
if (this.connection != null) {
this.connection.disconnect();
}
} catch (LDAPException e) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("LDAP close failed.", e);
}
}
}
/**
     * Check if the provided password matches the user's password stored in LDAP.
*
* @param userDN the user.
* @param password the password.
* @return true if the password is valid, false otherwise.
*/
public boolean checkPassword(String userDN, String password)
{
return checkPassword(userDN, password, "userPassword");
}
/**
     * Check if the provided password matches the user's password stored in LDAP.
*
* @param userDN the user.
* @param password the password.
* @param passwordField the name of the LDAP field containing the password.
* @return true if the password is valid, false otherwise.
*/
public boolean checkPassword(String userDN, String password, String passwordField)
{
try {
LDAPAttribute attribute = new LDAPAttribute(passwordField, password);
return this.connection.compare(userDN, attribute);
} catch (LDAPException e) {
if (e.getResultCode() == LDAPException.NO_SUCH_OBJECT) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Unable to locate user_dn:" + userDN, e);
}
} else if (e.getResultCode() == LDAPException.NO_SUCH_ATTRIBUTE) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Unable to verify password because userPassword attribute not found.", e);
}
} else {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Unable to verify password", e);
}
}
}
return false;
}
/**
* Execute a LDAP search query and return the first entry.
*
* @param baseDN the root DN from where to search.
* @param filter the LDAP filter.
     * @param attr the names of the attributes to return.
* @param ldapScope the scope of the entries to search. The following are the valid options:
* <ul>
* <li>SCOPE_BASE - searches only the base DN
* <li>SCOPE_ONE - searches only entries under the base DN
* <li>SCOPE_SUB - searches the base DN and all entries within its subtree
* </ul>
* @return the found LDAP attributes.
*/
public List<XWikiLDAPSearchAttribute> searchLDAP(String baseDN, String filter, String[] attr, int ldapScope)
{
List<XWikiLDAPSearchAttribute> searchAttributeList = null;
LDAPSearchResults searchResults = null;
try {
// filter return all attributes return attrs and values time out value
searchResults = search(baseDN, filter, attr, ldapScope);
if (!searchResults.hasMore()) {
return null;
}
LDAPEntry nextEntry = searchResults.next();
String foundDN = nextEntry.getDN();
searchAttributeList = new ArrayList<XWikiLDAPSearchAttribute>();
searchAttributeList.add(new XWikiLDAPSearchAttribute("dn", foundDN));
LDAPAttributeSet attributeSet = nextEntry.getAttributeSet();
ldapToXWikiAttribute(searchAttributeList, attributeSet);
} catch (LDAPException e) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("LDAP Search failed", e);
}
} finally {
if (searchResults != null) {
try {
this.connection.abandon(searchResults);
} catch (LDAPException e) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("LDAP Search clean up failed", e);
}
}
}
}
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("LDAP search found attributes: " + searchAttributeList);
}
return searchAttributeList;
}
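    /*
     * Hedged example of how searchLDAP might be called; the base DN, filter and attribute
     * names are made up, and "ldap" is assumed to be an already opened connection. SCOPE_SUB
     * is one of the LDAPConnection scope constants listed in the javadoc above.
     *
     *     List<XWikiLDAPSearchAttribute> attrs = ldap.searchLDAP(
     *         "ou=people,dc=example,dc=org",
     *         "(uid=" + XWikiLDAPConnection.escapeLDAPSearchFilter("jdoe") + ")",
     *         new String[] {"cn", "mail"},
     *         LDAPConnection.SCOPE_SUB);
     *     // null is returned when nothing matches, so callers should check before iterating.
     */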
/**
* @param baseDN the root DN from where to search.
     * @param filter the LDAP filter.
     * @param attr the names of the attributes to return.
* @param ldapScope the scope of the entries to search. The following are the valid options:
* <ul>
* <li>SCOPE_BASE - searches only the base DN
* <li>SCOPE_ONE - searches only entries under the base DN
* <li>SCOPE_SUB - searches the base DN and all entries within its subtree
* </ul>
* @return a result stream. LDAPConnection#abandon should be called when it's not needed anymore.
* @throws LDAPException error when searching
* @since 3.3M1
*/
public LDAPSearchResults search(String baseDN, String filter, String[] attr, int ldapScope) throws LDAPException
{
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("LDAP search: baseDN=[{}] query=[{}] attr=[{}] ldapScope=[{}]", new Object[] {baseDN,
filter, attr != null ? Arrays.asList(attr) : null, ldapScope});
}
return this.connection.search(baseDN, ldapScope, filter, attr, false);
}
/**
* Fill provided <code>searchAttributeList</code> with provided LDAP attributes.
*
* @param searchAttributeList the XWiki attributes.
* @param attributeSet the LDAP attributes.
*/
protected void ldapToXWikiAttribute(List<XWikiLDAPSearchAttribute> searchAttributeList,
LDAPAttributeSet attributeSet)
{
for (LDAPAttribute attribute : (Set<LDAPAttribute>) attributeSet) {
String attributeName = attribute.getName();
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(" - values for attribute \"" + attributeName + "\"");
}
Enumeration<String> allValues = attribute.getStringValues();
if (allValues != null) {
while (allValues.hasMoreElements()) {
String value = allValues.nextElement();
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(" |- [" + value + "]");
}
searchAttributeList.add(new XWikiLDAPSearchAttribute(attributeName, value));
}
}
}
}
/**
* Fully escape DN value (the part after the =).
* <p>
* For example, for the dn value "Acme, Inc", the escapeLDAPDNValue method returns "Acme\, Inc".
* </p>
*
* @param value the DN value to escape
     * @return the escaped version of the DN value
*/
public static String escapeLDAPDNValue(String value)
{
return StringUtils.isBlank(value) ? value : LDAPDN.escapeRDN("key=" + value).substring(4);
}
/**
* Escape part of a LDAP query filter.
*
* @param value the value to escape
* @return the escaped version
*/
public static String escapeLDAPSearchFilter(String value)
{
if (value == null) {
return null;
}
StringBuilder sb = new StringBuilder();
for (int i = 0; i < value.length(); i++) {
char curChar = value.charAt(i);
switch (curChar) {
case '\\':
sb.append("\\5c");
break;
case '*':
sb.append("\\2a");
break;
case '(':
sb.append("\\28");
break;
case ')':
sb.append("\\29");
break;
case '\u0000':
sb.append("\\00");
break;
default:
sb.append(curChar);
}
}
return sb.toString();
}
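    /*
     * The two escaping helpers side by side (values are illustrative):
     *
     *     escapeLDAPDNValue("Acme, Inc");        // -> "Acme\, Inc"        (DN value escaping)
     *     escapeLDAPSearchFilter("a*(b)\\c");    // -> "a\2a\28b\29\5cc"   (filter escaping)
     *
     * DN escaping protects characters that are special in distinguished names, while filter
     * escaping replaces the characters that are special in LDAP search filters, as handled in
     * the switch statement above.
     */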
}
|
package solutions.alterego.androidbound.android.adapters;
import android.support.v4.util.Pair;
import android.support.v7.util.DiffUtil;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.StaggeredGridLayoutManager;
import android.util.SparseArray;
import android.view.ViewGroup;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import lombok.Getter;
import lombok.Setter;
import lombok.experimental.Accessors;
import rx.Observable;
import rx.Subscription;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
import rx.subscriptions.CompositeSubscription;
import rx.subscriptions.Subscriptions;
import solutions.alterego.androidbound.interfaces.IViewBinder;
import static android.support.v7.util.DiffUtil.calculateDiff;
@Accessors(prefix = "m")
public class BindableRecyclerViewAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
private final IViewBinder mViewBinder;
@Getter
private int mItemTemplate;
@Getter
private Map<Class<?>, Integer> mTemplatesForObjects = new HashMap<>();
@Getter
private List mItemsSource = new ArrayList<>();
private SparseArray<Class<?>> mObjectIndex;
@Getter
@Setter
private RecyclerView.LayoutManager mLayoutManager;
private Subscription mSetValuesSubscription = Subscriptions.unsubscribed();
private Subscription mRemoveItemsSubscription = Subscriptions.unsubscribed();
private CompositeSubscription mPageSubscriptions = new CompositeSubscription();
private Queue<List<?>> pendingUpdates =
new ArrayDeque<>();
public BindableRecyclerViewAdapter(IViewBinder vb, int itemTemplate) {
mViewBinder = vb;
mItemTemplate = itemTemplate;
}
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
Class<?> clazz = mObjectIndex.get(viewType);
int layoutRes = mItemTemplate;
if (clazz != null && mTemplatesForObjects.containsKey(clazz)) {
layoutRes = mTemplatesForObjects.get(clazz);
mViewBinder.getLogger().verbose(
"BindableRecyclerViewAdapter creating VH for viewType = " + viewType + " i.e. class = " + clazz
+ " using layoutRes = "
+ layoutRes);
} else if (layoutRes != 0) {
mViewBinder.getLogger().verbose("BindableRecyclerViewAdapter creating VH using layoutRes = " + layoutRes);
} else {
mViewBinder.getLogger().error("BindableRecyclerViewAdapter cannot find templates for class = " + clazz
+ ": did you call setTemplatesForObjects or set itemTemplate in XML?");
}
return new BindableRecyclerViewItemViewHolder(
mViewBinder.inflate(parent.getContext(), null, layoutRes, parent, false),
mViewBinder, parent);
}
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
if (holder instanceof BindableRecyclerViewItemViewHolder) {
if (getLayoutManager() instanceof StaggeredGridLayoutManager) {
((BindableRecyclerViewItemViewHolder) holder)
.onBindViewHolder(getItemsSource().get(position), getLayoutManager());
} else {
((BindableRecyclerViewItemViewHolder) holder).onBindViewHolder(getItemsSource().get(position));
}
}
}
@Override
public int getItemCount() {
return getItemsSource() != null ? getItemsSource().size() : 0;
}
@Override
public int getItemViewType(int position) {
Object obj = getItemsSource().get(position);
int viewType = mObjectIndex.indexOfValue(obj.getClass());
mViewBinder.getLogger().verbose(
"BindableRecyclerViewAdapter getItemViewType viewType = " + viewType + " i.e. class = " + obj.getClass()
.toString()
+ " for position = " + position);
return viewType;
}
public void setItemsSource(final List<?> value) {
final List<?> oldItems = new ArrayList<>(mItemsSource);
mSetValuesSubscription.unsubscribe();
mSetValuesSubscription = Observable.just(value)
.subscribeOn(Schedulers.computation())
.map(newList -> new Pair<List<?>, DiffUtil.DiffResult>(newList, calculateDiff(new ItemSourceDiffCallback(oldItems, value))))
.observeOn(AndroidSchedulers.mainThread())
.subscribe(this::applyDiffResult, Throwable::printStackTrace);
}
public void addItemsSource(List<?> values) {
if (values == null) {
if (mItemsSource != null) {
int size = mItemsSource.size();
mItemsSource = null;
postNotifyItemRangeRemoved(0, size);
}
return;
}
if (mItemsSource == null) {
mItemsSource = new ArrayList<>();
}
Subscription s = Observable.from(values)
.filter(value -> value != null)
.subscribe(value -> {
boolean contains = mItemsSource.contains(value);
if (contains) {
int index = mItemsSource.indexOf(value);
mItemsSource.set(index, value);
notifyItemChanged(index);
} else if (mItemsSource.add(value)) {
notifyItemInserted(mItemsSource.size() - 1);
}
});
mPageSubscriptions.add(s);
}
private void applyDiffResult(Pair<List<?>, DiffUtil.DiffResult> resultPair) {
if (!pendingUpdates.isEmpty()) {
pendingUpdates.remove();
}
mItemsSource.clear();
if (resultPair.first != null) {
mItemsSource.addAll(new ArrayList<>(resultPair.first));
}
resultPair.second.dispatchUpdatesTo(this);
if (pendingUpdates.size() > 0) {
setItemsSource(pendingUpdates.peek());
}
}
/* to prevent Cannot call this method in a scroll callback. Scroll callbacks might be run during a measure
& layout pass where you cannot change the RecyclerView data. Any method call that might change the structure
of the RecyclerView or the adapter contents should be postponed to the next frame.*/
private void postNotifyItemRangeRemoved(final int start, final int itemCount) {
AndroidSchedulers.mainThread().createWorker().schedule(() -> notifyItemRangeRemoved(start, itemCount));
}
public void setTemplatesForObjects(Map<Class<?>, Integer> templatesForObjects) {
        if (templatesForObjects == null) {
            return;
        }
mTemplatesForObjects = templatesForObjects;
mObjectIndex = new SparseArray<>();
Class<?>[] classes = mTemplatesForObjects.keySet().toArray(new Class[mTemplatesForObjects.keySet().size()]);
for (int index = 0; index < classes.length; index++) {
mObjectIndex.put(index, classes[index]);
}
if (mItemsSource != null) {
notifyDataSetChanged();
}
}
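    /*
     * Hedged usage sketch (view-model classes and layout ids are illustrative, not taken from
     * the library): every registered class gets its own view type, and classes without an
     * entry fall back to the itemTemplate supplied in the constructor or in XML.
     *
     *     Map<Class<?>, Integer> templates = new HashMap<>();
     *     templates.put(HeaderViewModel.class, R.layout.item_header);
     *     templates.put(RowViewModel.class, R.layout.item_row);
     *     adapter.setTemplatesForObjects(templates);
     */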
public void removeItems(final List<?> value) {
if (mItemsSource == null) {
return;
}
List<?> tmp = new ArrayList<>(mItemsSource);
mRemoveItemsSubscription.unsubscribe();
mRemoveItemsSubscription = Observable.just(tmp)
.subscribeOn(Schedulers.computation())
.map(list -> {
list.removeAll(value);
return list;
})
.map(list -> new Pair<List, DiffUtil.DiffResult>(list,
calculateDiff(new ItemSourceDiffCallback(mItemsSource, list), true)))
.observeOn(AndroidSchedulers.mainThread())
.subscribe(pair -> {
if (pair.first != null && mItemsSource != null) {
mItemsSource.clear();
mItemsSource.addAll(pair.first);
}
pair.second.dispatchUpdatesTo(this);
}, throwable -> {
notifyDataSetChanged();
});
}
@Override
public void onDetachedFromRecyclerView(RecyclerView recyclerView) {
super.onDetachedFromRecyclerView(recyclerView);
mRemoveItemsSubscription.unsubscribe();
mSetValuesSubscription.unsubscribe();
mPageSubscriptions.unsubscribe();
}
}
|
package com.sequenceiq.sdx.api.model;
public enum SdxClusterStatusResponse {
REQUESTED,
WAIT_FOR_ENVIRONMENT,
ENVIRONMENT_CREATED,
STACK_CREATION_IN_PROGRESS,
STACK_CREATION_FINISHED,
STACK_DELETED,
STACK_DELETION_IN_PROGRESS,
EXTERNAL_DATABASE_CREATION_IN_PROGRESS,
EXTERNAL_DATABASE_DELETION_IN_PROGRESS,
EXTERNAL_DATABASE_CREATED,
RUNNING,
PROVISIONING_FAILED,
REPAIR_IN_PROGRESS,
REPAIR_FAILED,
CHANGE_IMAGE_IN_PROGRESS,
UPGRADE_IN_PROGRESS,
UPGRADE_FAILED,
DELETE_REQUESTED,
DELETED,
DELETE_FAILED,
START_REQUESTED,
START_IN_PROGRESS,
START_FAILED,
STOP_REQUESTED,
STOP_IN_PROGRESS,
STOP_FAILED,
STOPPED,
CLUSTER_AMBIGUOUS,
SYNC_FAILED
}
|
package io.spine.server.storage.datastore.type;
import com.google.cloud.datastore.BaseEntity;
import com.google.cloud.datastore.Entity;
import com.google.cloud.datastore.Key;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Timestamp;
import io.spine.core.Version;
import io.spine.core.Versions;
import io.spine.json.Json;
import io.spine.server.storage.datastore.given.TestDatastores;
import io.spine.test.storage.Project;
import io.spine.testdata.Sample;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import static com.google.cloud.Timestamp.ofTimeSecondsAndNanos;
import static com.google.common.truth.Truth.assertThat;
import static io.spine.base.Time.currentTime;
import static io.spine.server.storage.datastore.type.DsColumnTypes.timestampType;
import static io.spine.testing.DisplayNames.HAVE_PARAMETERLESS_CTOR;
import static io.spine.testing.Tests.assertHasPrivateParameterlessCtor;
@DisplayName("DsColumnTypes should")
class DsColumnTypesTest {
private static final String COLUMN_LABEL = "some-column";
private BaseEntity.Builder<Key, Entity.Builder> entityBuilder;
@BeforeEach
void setUp() {
entityBuilder = entityBuilder();
}
@Test
@DisplayName(HAVE_PARAMETERLESS_CTOR)
void testPrivateCtor() {
assertHasPrivateParameterlessCtor(DsColumnTypes.class);
}
@Test
@DisplayName("provide simple string type")
void testString() {
SimpleDatastoreColumnType<String> type = DsColumnTypes.stringType();
String value = "some string";
setSimpleType(type, value);
BaseEntity<Key> entity = entityBuilder.build();
String entityField = entity.getString(COLUMN_LABEL);
assertThat(entityField)
.isEqualTo(value);
}
@Test
@DisplayName("provide simple int type")
void testInt() {
SimpleDatastoreColumnType<Integer> type = DsColumnTypes.integerType();
int value = 42;
setSimpleType(type, value);
BaseEntity<Key> entity = entityBuilder.build();
long entityField = entity.getLong(COLUMN_LABEL);
assertThat(entityField)
.isEqualTo(value);
}
@Test
@DisplayName("provide simple float type")
void testFloat() {
SimpleDatastoreColumnType<Float> type = DsColumnTypes.floatType();
float value = 3.14f;
setSimpleType(type, value);
BaseEntity<Key> entity = entityBuilder.build();
double entityField = entity.getDouble(COLUMN_LABEL);
assertThat(entityField)
.isWithin(0.01)
.of(value);
}
@Test
@DisplayName("provide simple double type")
void testDouble() {
SimpleDatastoreColumnType<Double> type = DsColumnTypes.doubleType();
double value = 2.718281828459045;
setSimpleType(type, value);
BaseEntity<Key> entity = entityBuilder.build();
double entityField = entity.getDouble(COLUMN_LABEL);
assertThat(entityField)
.isWithin(0.01)
.of(value);
}
@Test
@DisplayName("provide simple long type")
void testLong() {
SimpleDatastoreColumnType<Long> type = DsColumnTypes.longType();
long value = 42L;
setSimpleType(type, value);
BaseEntity<Key> entity = entityBuilder.build();
long entityField = entity.getLong(COLUMN_LABEL);
assertThat(entityField)
.isEqualTo(value);
}
@Test
@DisplayName("provide simple boolean type")
void testBoolean() {
SimpleDatastoreColumnType<Boolean> type = DsColumnTypes.booleanType();
boolean value = true;
setSimpleType(type, value);
BaseEntity<Key> entity = entityBuilder.build();
boolean entityField = entity.getBoolean(COLUMN_LABEL);
assertThat(entityField)
.isEqualTo(value);
}
@Test
@DisplayName("provide Timestamp as DateTime type")
void testTimestampToDateTime() {
DatastoreColumnType<Timestamp, com.google.cloud.Timestamp> type = timestampType();
Timestamp value = currentTime();
com.google.cloud.Timestamp timestamp = ofTimeSecondsAndNanos(value.getSeconds(),
value.getNanos());
setDatastoreType(type, value, timestamp);
BaseEntity<Key> entity = entityBuilder.build();
com.google.cloud.Timestamp entityField = entity.getTimestamp(COLUMN_LABEL);
assertThat(entityField)
.isEqualTo(timestamp);
}
@Test
@DisplayName("provide Version as int type")
void testVersionToInt() {
DatastoreColumnType<Version, Integer> type = DsColumnTypes.versionType();
Version value = Versions.zero();
value = Versions.increment(value);
int number = 1;
setDatastoreType(type, value, number);
BaseEntity<Key> entity = entityBuilder.build();
long entityField = entity.getLong(COLUMN_LABEL);
assertThat(entityField)
.isEqualTo(number);
}
@Test
@DisplayName("provide Message as String type")
void testMessageToString() {
DatastoreColumnType<AbstractMessage, String> type = DsColumnTypes.messageType();
AbstractMessage value = Sample.messageOfType(Project.class);
String stringMessage = Json.toCompactJson(value); // default Stringifier behavior
setDatastoreType(type, value, stringMessage);
BaseEntity<Key> entity = entityBuilder.build();
String entityField = entity.getString(COLUMN_LABEL);
assertThat(entityField)
.isEqualTo(stringMessage);
}
@Test
@DisplayName("set null value")
void testNull() {
SimpleDatastoreColumnType<Boolean> type = DsColumnTypes.booleanType();
type.setNull(entityBuilder, COLUMN_LABEL);
BaseEntity<Key> entity = entityBuilder.build();
String entityField = entity.getString(COLUMN_LABEL);
assertThat(entityField)
.isNull();
}
private <T> void setSimpleType(SimpleDatastoreColumnType<T> type, T value) {
T storedValue = type.convertColumnValue(value);
assertThat(storedValue)
.isEqualTo(value);
type.setColumnValue(entityBuilder, storedValue, COLUMN_LABEL);
}
private <J, S> void setDatastoreType(DatastoreColumnType<J, S> type,
J value,
S expectedStoredValue) {
S storedValue = type.convertColumnValue(value);
assertThat(storedValue)
.isEqualTo(expectedStoredValue);
type.setColumnValue(entityBuilder, storedValue, COLUMN_LABEL);
}
private static BaseEntity.Builder<Key, Entity.Builder> entityBuilder() {
String projectId = TestDatastores.projectId()
.value();
Key key = Key.newBuilder(projectId, "some-entity-kind", "some-name")
.build();
Entity.Builder builder = Entity.newBuilder(key);
return builder;
}
}
|
package org.openhab.binding.rme.handler;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.Enumeration;
import java.util.TooManyListenersException;
import org.apache.commons.io.IOUtils;
import org.eclipse.smarthome.core.thing.ChannelUID;
import org.eclipse.smarthome.core.thing.Thing;
import org.eclipse.smarthome.core.thing.ThingStatus;
import org.eclipse.smarthome.core.thing.ThingStatusDetail;
import org.eclipse.smarthome.core.thing.binding.BaseThingHandler;
import org.eclipse.smarthome.core.types.Command;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import gnu.io.CommPortIdentifier;
import gnu.io.PortInUseException;
import gnu.io.SerialPort;
import gnu.io.SerialPortEvent;
import gnu.io.SerialPortEventListener;
import gnu.io.UnsupportedCommOperationException;
/**
* The {@link SerialThingHandler} is responsible for handling commands, which
* are sent to one of the channels. Thing Handler classes that use serial
 * communications can extend this class, but must make sure they supply the required
 * configuration parameters in the {@link SerialConfiguration} of the underlying Thing,
 * if these are not already specified in the thing.xml definition.
*
* @author Karel Goderis - Initial contribution
*/
public abstract class SerialThingHandler extends BaseThingHandler implements SerialPortEventListener {
// List of all Configuration parameters
public static final String PORT = "port";
public static final String BAUD_RATE = "baud";
public static final String BUFFER_SIZE = "buffer";
private Logger logger = LoggerFactory.getLogger(SerialThingHandler.class);
private SerialPort serialPort;
private CommPortIdentifier portId;
private InputStream inputStream;
private OutputStream outputStream;
protected int baud;
protected String port;
protected int bufferSize;
public SerialThingHandler(Thing thing) {
super(thing);
}
/**
* Called when data is received on the serial port
*
* @param line
* - the received data as a String
*
**/
    public abstract void onDataReceived(String line);
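    /*
     * Hedged sketch of a minimal subclass (class, port and channel handling are illustrative,
     * not taken from the binding): a concrete handler only needs to react to the complete
     * lines pushed in by serialEvent(), and can answer over the same port via writeString().
     *
     *     public class MyMeterHandler extends SerialThingHandler {
     *         public MyMeterHandler(Thing thing) {
     *             super(thing);
     *             this.port = "/dev/ttyUSB0"; // normally read from the Thing configuration
     *             this.baud = 9600;
     *             this.bufferSize = 1024;
     *         }
     *
     *         @Override
     *         public void onDataReceived(String line) {
     *             // parse the line and update the channels, e.g. via updateState(...)
     *         }
     *     }
     */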
/**
* Write data to the serial port
*
* @param msg
     *            - the data to write to the serial port, as a String
*
**/
public void writeString(String msg) {
String port = (String) this.getConfig().get(PORT);
try {
// write string to serial port
outputStream.write(msg.getBytes());
outputStream.flush();
} catch (IOException e) {
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR,
"Error writing '" + msg + "' to serial port " + port + " : " + e.getMessage());
}
}
@Override
public void serialEvent(SerialPortEvent event) {
switch (event.getEventType()) {
case SerialPortEvent.BI:
case SerialPortEvent.OE:
case SerialPortEvent.FE:
case SerialPortEvent.PE:
case SerialPortEvent.CD:
case SerialPortEvent.CTS:
case SerialPortEvent.DSR:
case SerialPortEvent.RI:
case SerialPortEvent.OUTPUT_BUFFER_EMPTY:
break;
case SerialPortEvent.DATA_AVAILABLE:
try {
BufferedReader br = new BufferedReader(new InputStreamReader(inputStream), bufferSize);
while (br.ready()) {
String line = br.readLine();
logger.debug("Receving '{}' on '{}'", line, getConfig().get(PORT));
onDataReceived(line);
}
} catch (IOException e) {
String port = (String) getConfig().get(PORT);
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.COMMUNICATION_ERROR,
"Error receiving data on serial port " + port + " : " + e.getMessage());
}
break;
}
}
@Override
public void dispose() {
logger.debug("Disposing serial thing handler.");
IOUtils.closeQuietly(inputStream);
IOUtils.closeQuietly(outputStream);
if (serialPort != null) {
serialPort.close();
}
}
@Override
public void initialize() {
logger.debug("Initializing serial thing handler.");
if (serialPort == null && port != null && baud != 0) {
            // parse the available ports and, if the configured port is found, initialize
            // the reader
@SuppressWarnings("rawtypes")
Enumeration portList = CommPortIdentifier.getPortIdentifiers();
while (portList.hasMoreElements()) {
CommPortIdentifier id = (CommPortIdentifier) portList.nextElement();
if (id.getPortType() == CommPortIdentifier.PORT_SERIAL) {
if (id.getName().equals(port)) {
logger.debug("Serial port '{}' has been found.", port);
portId = id;
}
}
}
if (portId != null) {
// initialize serial port
try {
serialPort = portId.open("openHAB", 2000);
} catch (PortInUseException e) {
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.CONFIGURATION_ERROR,
"Could not open serial port " + serialPort + ": " + e.getMessage());
return;
}
try {
inputStream = serialPort.getInputStream();
} catch (IOException e) {
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.CONFIGURATION_ERROR,
"Could not open serial port " + serialPort + ": " + e.getMessage());
return;
}
try {
serialPort.addEventListener(this);
} catch (TooManyListenersException e) {
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.CONFIGURATION_ERROR,
"Could not open serial port " + serialPort + ": " + e.getMessage());
return;
}
// activate the DATA_AVAILABLE notifier
serialPort.notifyOnDataAvailable(true);
try {
// set port parameters
serialPort.setSerialPortParams(baud, SerialPort.DATABITS_8, SerialPort.STOPBITS_1,
SerialPort.PARITY_NONE);
} catch (UnsupportedCommOperationException e) {
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.CONFIGURATION_ERROR,
"Could not configure serial port " + serialPort + ": " + e.getMessage());
return;
}
try {
// get the output stream
outputStream = serialPort.getOutputStream();
updateStatus(ThingStatus.ONLINE);
} catch (IOException e) {
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.CONFIGURATION_ERROR,
"Could not communicate with the serial port " + serialPort + ": " + e.getMessage());
return;
}
} else {
StringBuilder sb = new StringBuilder();
portList = CommPortIdentifier.getPortIdentifiers();
while (portList.hasMoreElements()) {
CommPortIdentifier id = (CommPortIdentifier) portList.nextElement();
if (id.getPortType() == CommPortIdentifier.PORT_SERIAL) {
sb.append(id.getName() + "\n");
}
}
logger.error("Serial port '" + port + "' could not be found. Available ports are:\n" + sb.toString());
updateStatus(ThingStatus.OFFLINE, ThingStatusDetail.CONFIGURATION_ERROR);
}
}
}
@Override
public void handleCommand(ChannelUID channelUID, Command command) {
// by default, we write anything we received as a string to the serial
// port
writeString(command.toString());
}
}
|
package org.csstudio.trends.databrowser.ui;
import java.util.ArrayList;
import java.util.Timer;
import java.util.TimerTask;
import org.csstudio.apputil.ui.dialog.ErrorDetailDialog;
import org.csstudio.platform.data.ITimestamp;
import org.csstudio.platform.data.TimestampFactory;
import org.csstudio.platform.logging.CentralLogger;
import org.csstudio.platform.model.IArchiveDataSource;
import org.csstudio.swt.xygraph.undo.OperationsManager;
import org.csstudio.trends.databrowser.Messages;
import org.csstudio.trends.databrowser.archive.ArchiveFetchJob;
import org.csstudio.trends.databrowser.archive.ArchiveFetchJobListener;
import org.csstudio.trends.databrowser.model.ArchiveDataSource;
import org.csstudio.trends.databrowser.model.ArchiveRescale;
import org.csstudio.trends.databrowser.model.AxisConfig;
import org.csstudio.trends.databrowser.model.Model;
import org.csstudio.trends.databrowser.model.ModelItem;
import org.csstudio.trends.databrowser.model.ModelListener;
import org.csstudio.trends.databrowser.model.PVItem;
import org.csstudio.trends.databrowser.preferences.Preferences;
import org.csstudio.trends.databrowser.propsheet.AddArchiveCommand;
import org.csstudio.trends.databrowser.propsheet.AddAxisCommand;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.osgi.util.NLS;
import org.eclipse.swt.events.ShellEvent;
import org.eclipse.swt.events.ShellListener;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;
/** Controller that interfaces the {@link Model} with the {@link Plot}:
* <ul>
* <li>For each item in the Model, create a trace in the plot.
* <li>Perform scrolling of the time axis.
* <li>When the plot is interactively zoomed, update the Model's time range.
* <li>Get archived data whenever the time axis changes.
* </ul>
* @author Kay Kasemir
*/
public class Controller implements ArchiveFetchJobListener
{
/** Optional shell used to track shell state */
final private Shell shell;
/** Display used for dialog boxes etc. */
final private Display display;
/** Model with data to display */
final Model model;
/** GUI for displaying the data */
final Plot plot;
/** Timer that triggers scrolling or trace redraws */
final Timer update_timer = new Timer("Update Timer", true); //$NON-NLS-1$
/** Task executed by update_timer */
private TimerTask update_task = null;
/** Was scrolling off, i.e. we have not scrolled for a while? */
private boolean scrolling_was_off = true;
/** Delay to avoid flurry of archive requests
* @see #scheduleArchiveRetrieval(ITimestamp, ITimestamp)
*/
final private long archive_fetch_delay = Preferences.getArchiveFetchDelay();
/** Delayed task to avoid flurry of archive requests
* @see #scheduleArchiveRetrieval(ITimestamp, ITimestamp)
*/
private TimerTask archive_fetch_delay_task = null;
/** Currently active archive jobs, used to prevent multiple requests
* for the same model item.
*/
final private ArrayList<ArchiveFetchJob> archive_fetch_jobs =
new ArrayList<ArchiveFetchJob>();
/** Is the window (shell) iconized? */
private volatile boolean window_is_iconized = false;
/** Should we perform redraws, or is the window hidden and we should suppress them? */
private boolean suppress_redraws = false;
/** Is there any Y axis that's auto-scaled? */
private volatile boolean have_autoscale_axis = false;
/** Initialize
* @param shell Shell
* @param model Model that has the data
* @param plot Plot for displaying the Model
* @throws Error when called from non-UI thread
*/
public Controller(final Shell shell, final Model model, final Plot plot)
{
this.shell = shell;
this.model = model;
this.plot = plot;
if (shell == null)
{
display = Display.getCurrent();
if (display == null)
throw new Error("Must be called from UI thread"); //$NON-NLS-1$
}
else
{
display = shell.getDisplay();
// Update 'iconized' state from shell
shell.addShellListener(new ShellListener()
{
@Override
public void shellIconified(ShellEvent e)
{
window_is_iconized = true;
}
@Override
public void shellDeiconified(ShellEvent e)
{
window_is_iconized = false;
}
@Override
public void shellDeactivated(ShellEvent e) { /* Ignore */ }
@Override
public void shellClosed(ShellEvent e) { /* Ignore */ }
@Override
public void shellActivated(ShellEvent e) { /* Ignore */ }
});
window_is_iconized = shell.getMinimized();
}
checkAutoscaleAxes();
createPlotTraces();
// Listen to user input from Plot UI, update model
plot.addListener(new PlotListener()
{
@Override
public void scrollRequested(final boolean enable_scrolling)
{
model.enableScrolling(enable_scrolling);
}
@Override
public void timeConfigRequested()
{
StartEndTimeAction.run(shell, model, plot.getOperationsManager());
}
@Override
public void timeAxisChanged(final long start_ms, final long end_ms)
{
if (model.isScrollEnabled())
{
final long dist = Math.abs(end_ms - System.currentTimeMillis());
final long range = end_ms - start_ms;
// Iffy range?
if (range <= 0)
return;
// In scroll mode, if the end time selected by the user via
// the GUI is close enough to 'now', scrolling remains 'on'
// and we'll continue to scroll with the new time range.
if (dist * 100 / range > 10)
{ // Time range 10% away from 'now', disable scrolling
model.enableScrolling(false);
}
else if (Math.abs(100*(range - (long)(model.getTimespan()*1000))/range) <= 1)
{
// We're still scrolling, and the time span didn't really
// change, i.e. it's within 1% of the model's span: Ignore.
// This happens when scrolling moved the time axis around,
// the user zoomed vertically, and the plot now tells
// us about a new time range that resulted from scrolling.
return;
}
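                        // Worked example of the two checks above (numbers are illustrative):
                        // with a 1 hour range (3 600 000 ms), scrolling is switched off once the
                        // selected end time is more than ~6 minutes (10% of the range) away from
                        // 'now', while a zoom whose new span stays within 36 seconds (1%) of the
                        // model's configured span is treated as unchanged and ignored.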
}
final ITimestamp start_time = TimestampFactory.fromMillisecs(start_ms);
final ITimestamp end_time = TimestampFactory.fromMillisecs(end_ms);
// Update model's time range
model.setTimerange(start_time, end_time);
// Controller's ModelListener will fetch new archived data
}
@Override
public void valueAxisChanged(final int index, final double lower, final double upper)
{ // Update axis range in model
final AxisConfig axis = model.getAxis(index);
axis.setRange(lower, upper);
}
@Override
public void droppedName(final String name)
{
// Offer potential PV name in dialog so user can edit/cancel
final AddPVAction add = new AddPVAction(plot.getOperationsManager(), shell, model, false);
// Allow passing in many names, assuming that white space separates them
final String[] names = name.split("[\\r\\n\\t ]+"); //$NON-NLS-1$
for (String n : names)
if (! add.runWithSuggestedName(n, null))
break;
}
@Override
public void droppedPVName(final String name, final IArchiveDataSource archive)
{
if (name == null)
{
if (archive == null)
return;
// Received only an archive. Add to all PVs
final ArchiveDataSource arch = new ArchiveDataSource(archive);
for (int i=0; i<model.getItemCount(); ++i)
{
if (! (model.getItem(i) instanceof PVItem))
continue;
final PVItem pv = (PVItem) model.getItem(i);
if (pv.hasArchiveDataSource(arch))
continue;
new AddArchiveCommand(plot.getOperationsManager(), pv, arch);
}
}
else
{ // Received PV name
final ModelItem item = model.getItem(name);
if (item == null)
{
final OperationsManager operations_manager = plot.getOperationsManager();
// Add to first empty axis, or create new axis
AxisConfig axis = model.getEmptyAxis();
if (axis == null)
axis = new AddAxisCommand(operations_manager, model).getAxis();
// Add new PV
AddModelItemCommand.forPV(shell, operations_manager,
model, name, Preferences.getScanPeriod(),
axis, archive);
return;
}
if (archive == null || ! (item instanceof PVItem))
{ // Duplicate PV, or a formula to which we cannot add archives
MessageDialog.openError(shell, Messages.Error,
NLS.bind(Messages.DuplicateItemFmt, name));
return;
}
// Add archive to existing PV
if (item instanceof PVItem)
new AddArchiveCommand(plot.getOperationsManager(),
(PVItem) item, new ArchiveDataSource(archive));
}
}
});
// Listen to Model changes, update Plot
model.addListener(new ModelListener()
{
@Override
public void changedUpdatePeriod()
{
if (update_task != null)
createUpdateTask();
}
@Override
public void changedArchiveRescale()
{
// NOP
}
@Override
public void changedColors()
{
plot.setBackgroundColor(model.getPlotBackground());
}
@Override
public void changedTimerange()
{
// Get matching archived data
scheduleArchiveRetrieval();
// Show new time range on plot?
if (model.isScrollEnabled())
return; // no, scrolling will handle that
// Yes, since the time axis is currently 'fixed'
final long start_ms = (long) (model.getStartTime().toDouble()*1000);
final long end_ms = (long) (model.getEndTime().toDouble()*1000);
plot.setTimeRange(start_ms, end_ms);
}
@Override
public void changedAxis(final AxisConfig axis)
{
checkAutoscaleAxes();
if (axis == null)
{
// New or removed axis: Recreate the whole plot
createPlotTraces();
return;
}
// Else: Update specific axis
for (int i=0; i<model.getAxisCount(); ++i)
{
if (model.getAxis(i) == axis)
{
plot.updateAxis(i, axis);
return;
}
}
}
@Override
public void itemAdded(final ModelItem item)
{
if (item.isVisible())
plot.addTrace(item);
// Get archived data for new item (NOP for non-PVs)
getArchivedData(item, model.getStartTime(), model.getEndTime());
}
@Override
public void itemRemoved(final ModelItem item)
{
if (item.isVisible())
plot.removeTrace(item);
}
@Override
public void changedItemVisibility(final ModelItem item)
{ // Add/remove from plot, but don't need to get archived data
if (item.isVisible())
// itemAdded(item) would also get archived data
plot.addTrace(item);
else
plot.removeTrace(item);
}
@Override
public void changedItemLook(final ModelItem item)
{
plot.updateTrace(item);
}
@Override
public void changedItemDataConfig(final PVItem item)
{
getArchivedData(item, model.getStartTime(), model.getEndTime());
}
@Override
public void scrollEnabled(final boolean scroll_enabled)
{
plot.updateScrollButton(scroll_enabled);
}
});
}
/** @param suppress_redraws <code>true</code> if controller should suppress
* redraws because window is hidden
*/
public void suppressRedraws(final boolean suppress_redraws)
{
if (this.suppress_redraws == suppress_redraws)
return;
this.suppress_redraws = suppress_redraws;
if (!suppress_redraws)
plot.redrawTraces();
}
/** Check if there's any axis in 'auto scale' mode.
* @see #have_autoscale_axis
*/
private void checkAutoscaleAxes()
{
have_autoscale_axis = false;
for (int i=0; i<model.getAxisCount(); ++i)
if (model.getAxis(i).isAutoScale())
{
have_autoscale_axis = true;
break;
}
}
/** When the user moves the time axis around, archive requests for the
* new time range are delayed to avoid a flurry of archive
     * requests while the user is still moving around.
*/
protected void scheduleArchiveRetrieval()
{
if (archive_fetch_delay_task != null)
archive_fetch_delay_task.cancel();
archive_fetch_delay_task = new TimerTask()
{
@Override
public void run()
{
getArchivedData();
}
};
update_timer.schedule(archive_fetch_delay_task, archive_fetch_delay);
}
/** Start model items and initiate scrolling/updates
* @throws Exception on error: Already running, problem starting threads, ...
* @see #isRunning()
*/
public void start() throws Exception
{
if (isRunning())
throw new IllegalStateException("Already started"); //$NON-NLS-1$
createUpdateTask();
model.start();
// In scroll mode, the first scroll will update the plot and get data
if (model.isScrollEnabled())
return;
// In non-scroll mode, initialize plot's time range and get data
plot.setTimeRange(model.getStartTime(), model.getEndTime());
getArchivedData();
}
/** @return <code>true</code> while running
* @see #stop()
*/
public boolean isRunning()
{
return update_task != null;
}
/** Create or re-schedule update task
* @see #start()
*/
private void createUpdateTask()
{
// Can't actually re-schedule, so stop one that might already be running
if (update_task != null)
{
update_task.cancel();
update_task = null;
}
update_task = new TimerTask()
{
@Override
public void run()
{
try
{
// Skip updates while nobody is watching
if (window_is_iconized || suppress_redraws)
return;
// Check if anything changed, which also updates formulas
final boolean anything_new = model.updateItemsAndCheckForNewSamples();
if (anything_new && have_autoscale_axis )
plot.updateAutoscale();
if (model.isScrollEnabled())
performScroll();
else
{
scrolling_was_off = true;
// Only redraw when needed
if (anything_new)
plot.redrawTraces();
}
}
catch (Throwable ex)
{
CentralLogger.getInstance().getLogger(this).
error("Error in timer task", ex); //$NON-NLS-1$
ex.printStackTrace();
}
}
};
final long update_delay = (long) (model.getUpdatePeriod() * 1000);
update_timer.schedule(update_task, update_delay, update_delay);
}
public void stop()
{
if (! isRunning())
throw new IllegalStateException("Not started"); //$NON-NLS-1$
// Stop ongoing archive access
synchronized (archive_fetch_jobs)
{
for (ArchiveFetchJob job : archive_fetch_jobs)
job.cancel();
archive_fetch_jobs.clear();
}
// Stop update task
model.stop();
update_task.cancel();
update_task = null;
}
/** (Re-) create traces in plot for each item in the model */
public void createPlotTraces()
{
plot.setBackgroundColor(model.getPlotBackground());
plot.updateScrollButton(model.isScrollEnabled());
plot.removeAll();
for (int i=0; i<model.getAxisCount(); ++i)
plot.updateAxis(i, model.getAxis(i));
for (int i=0; i<model.getItemCount(); ++i)
{
final ModelItem item = model.getItem(i);
if (item.isVisible())
plot.addTrace(item);
}
}
/** Scroll the plot to 'now' */
protected void performScroll()
{
if (! model.isScrollEnabled())
return;
final long end_ms = System.currentTimeMillis();
final long start_ms = end_ms - (long) (model.getTimespan()*1000);
plot.setTimeRange(start_ms, end_ms);
if (scrolling_was_off)
{ // Scrolling was just turned on.
// Get new archived data since the new time scale
// could be way off what's in the previous time range.
scrolling_was_off = false;
getArchivedData();
}
}
    /** Initiate archive data retrieval for all model items,
     *  using the model's current start and end time.
     */
private void getArchivedData()
{
final ITimestamp start = model.getStartTime();
final ITimestamp end = model.getEndTime();
for (int i=0; i<model.getItemCount(); ++i)
getArchivedData(model.getItem(i), start, end);
}
/** Initiate archive data retrieval for a specific model item
* @param item Model item. NOP for non-PVItem
* @param start Start time
* @param end End time
*/
private void getArchivedData(final ModelItem item,
final ITimestamp start, final ITimestamp end)
{
// Only useful for PVItems with archive data source
if (!(item instanceof PVItem))
return;
final PVItem pv_item = (PVItem) item;
if (pv_item.getArchiveDataSources().length <= 0)
return;
ArchiveFetchJob job;
// Stop ongoing jobs for this item
synchronized (archive_fetch_jobs)
{
for (int i=0; i<archive_fetch_jobs.size(); ++i)
{
job = archive_fetch_jobs.get(i);
if (job.getPVItem() != pv_item)
continue;
// System.out.println("Request for " + item.getName() + " cancels " + job);
job.cancel();
archive_fetch_jobs.remove(job);
}
// Start new job
job = new ArchiveFetchJob(pv_item, start, end, this);
archive_fetch_jobs.add(job);
}
job.schedule();
}
/** @see ArchiveFetchJobListener */
@Override
public void fetchCompleted(final ArchiveFetchJob job)
{
synchronized (archive_fetch_jobs)
{
archive_fetch_jobs.remove(job);
// System.out.println("Completed " + job + ", " + archive_fetch_jobs.size() + " left");
if (!archive_fetch_jobs.isEmpty())
return;
}
// All completed. Do something to the plot?
final ArchiveRescale rescale = model.getArchiveRescale();
if (rescale == ArchiveRescale.NONE)
return;
if (display == null || display.isDisposed())
return;
display.asyncExec(new Runnable()
{
@Override
public void run()
{
if (display.isDisposed())
return;
switch (rescale)
{
case AUTOZOOM:
plot.getXYGraph().performAutoScale();
break;
case STAGGER:
plot.getXYGraph().performStagger();
break;
default:
break;
}
}
});
}
/** @see ArchiveFetchJobListener */
@Override
public void archiveFetchFailed(final ArchiveFetchJob job,
final ArchiveDataSource archive, final Exception error)
{
        if (display == null || display.isDisposed())
            return;
        display.asyncExec(new Runnable()
{
@Override
public void run()
{
if (display.isDisposed())
return;
final String message = NLS.bind(Messages.ArchiveAccessMessageFmt,
job.getPVItem().getDisplayName());
final String detail = NLS.bind(Messages.ArchiveAccessDetailFmt,
error.getMessage(), archive.getUrl());
new ErrorDetailDialog(shell, Messages.Information, message, detail).open();
job.getPVItem().removeArchiveDataSource(archive);
}
});
}
}
|
package org.intermine.bio.dataconversion;
import java.io.BufferedReader;
import java.io.StringReader;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Set;
import org.intermine.bio.io.gff3.GFF3Parser;
import org.intermine.dataconversion.ItemsTestCase;
import org.intermine.dataconversion.MockItemWriter;
import org.intermine.metadata.Model;
import org.intermine.model.fulldata.Item;
public class LongOligoGFF3RecordHandlerTest extends ItemsTestCase
{
LongOligoGFF3RecordHandler handler;
GFF3Converter converter;
MockItemWriter writer = new MockItemWriter(new LinkedHashMap<String, Item>());
String seqClsName = "MRNA";
String taxonId = "7227";
String dataSetTitle = "INDAC long oligo data set";
String dataSourceName = "Micklem lab";
public LongOligoGFF3RecordHandlerTest(String arg) {
super(arg);
}
public void setUp() throws Exception {
Model tgtModel = Model.getInstanceByName("genomic");
handler = new LongOligoGFF3RecordHandler(tgtModel);
LongOligoGFF3SeqHandler seqHandler = new LongOligoGFF3SeqHandler();
MockIdResolverFactory resolverFactory = new MockIdResolverFactory("mRNA");
resolverFactory.addResolverEntry("7227", "FBtr0075391", Collections.singleton("CG4314-RA"));
seqHandler.resolverFactory = resolverFactory;
converter = new GFF3Converter(writer, seqClsName, taxonId, dataSourceName, dataSetTitle,
tgtModel, handler, seqHandler);
}
public void tearDown() throws Exception {
converter.close();
}
public void testParse() throws Exception {
String gff = "CG4314-RA\tINDAC_1.0\tmicroarray_oligo\t0\t0\t.\t.\t.\tID=1000044388;olen=14;oaTm=92.25;geneID=CG4314;Alias=12-CG4314-RA_1;sequence=ACACGGGTCAGGAT";
BufferedReader srcReader = new BufferedReader(new StringReader(gff));
converter.parse(srcReader);
converter.storeAll();
        // writes a new items xml file (useful when regenerating the expected output)
writeItemsFile(writer.getItems(), "long-oligo_items.xml");
Set<?> expected = readItemSet("LongOligoGFF3RecordHandlerTest.xml");
assertEquals(expected, writer.getItems());
}
}
|
package gov.nci.nih.cagrid.tests.core.steps;
import gov.nci.nih.cagrid.tests.core.util.AntUtils;
import gov.nci.nih.cagrid.tests.core.util.FileUtils;
import gov.nci.nih.cagrid.tests.core.util.IntroduceServiceInfo;
import gov.nci.nih.cagrid.tests.core.util.SourceUtils;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
import javax.xml.parsers.ParserConfigurationException;
import org.xml.sax.SAXException;
import com.atomicobject.haste.framework.Step;
public class CreateServiceStep
extends Step
{
private File introduceDir;
private File serviceDir;
private String serviceName;
private String pkg;
private String namespace;
private File serviceXmlDescriptor;
private File schemaDir;
private File[] schemas;
private File implFile;
private File[] jars;
private File[] etcFiles;
private Properties introduceProps;
public CreateServiceStep(File introduceDir, File testDir, File tmpDir)
throws ParserConfigurationException, SAXException, IOException
{
super();
// set introduceDir
this.introduceDir = introduceDir;
// set serviceXmlDescriptor, serviceName, pkg, and namespace
this.serviceXmlDescriptor = new File(testDir, IntroduceServiceInfo.INTRODUCE_SERVICEXML_FILENAME);
IntroduceServiceInfo serviceInfo = new IntroduceServiceInfo(this.serviceXmlDescriptor);
this.serviceName = serviceInfo.getServiceName();
this.namespace = serviceInfo.getNamespace();
this.pkg = serviceInfo.getPackageName();
// set serviceDir
this.serviceDir = new File(tmpDir, serviceName);
this.serviceDir.mkdirs();
// set schemas
this.schemaDir = new File(testDir, "schema");
if (schemaDir.exists()) {
this.schemas = FileUtils.listRecursively(schemaDir, new FileFilter() {
public boolean accept(File file) {
return file.getName().endsWith(".xsd");
}
});
} else {
this.schemas = new File[0];
}
// set implFile
this.implFile = new File(testDir, "src" + File.separator + serviceName + "Impl.java");
// set libJars
File libDir = new File(testDir, "lib");
if (libDir.exists()) {
this.jars = libDir.listFiles(new FileFilter() {
public boolean accept(File file) {
return file.getName().endsWith(".jar");
}
});
} else {
this.jars = new File[0];
}
// set metadata file
this.etcFiles = new File(testDir, "etc").listFiles(new FileFilter() {
public boolean accept(File file) {
return file.getName().endsWith(".xml");
}
});
introduceProps = new Properties();
File introducePropFile = new File(testDir, "introduce.properties");
if (introducePropFile.exists()) {
BufferedInputStream is = new BufferedInputStream(new FileInputStream(introducePropFile));
introduceProps.load(is);
is.close();
}
}
public CreateServiceStep(
File introduceDir, File serviceDir, String serviceName, String pkg, String namespace,
File serviceXmlDescriptor, File schemaDir, File[] schemas, File implFile, File[] jars, File[] etcFiles,
Properties introduceProps
) {
super();
this.introduceDir = introduceDir;
this.serviceDir = serviceDir;
this.serviceName = serviceName;
this.pkg = pkg;
this.namespace = namespace;
this.serviceXmlDescriptor = serviceXmlDescriptor;
this.schemas = schemas;
this.schemaDir = schemaDir;
this.implFile = implFile;
this.jars = jars;
this.etcFiles = etcFiles;
this.introduceProps = introduceProps;
}
public void runStep()
throws IOException, InterruptedException, ParserConfigurationException, SAXException
{
// create skeleton
createSkeleton();
// copy schemas
File schemaDir = new File(serviceDir, "schema" + File.separator + serviceName);
schemaDir.mkdirs();
for (File schema : schemas) {
String path = schema.toString().substring(this.schemaDir.toString().length());
if (path.startsWith("/") || path.startsWith("\\")) path = path.substring(1);
File targetFile = new File(schemaDir, path);
targetFile.getParentFile().mkdirs();
FileUtils.copy(schema, targetFile);
}
// copy interface
FileUtils.copy(serviceXmlDescriptor, new File(serviceDir, IntroduceServiceInfo.INTRODUCE_SERVICEXML_FILENAME));
// copy jars
File libDir = new File(serviceDir, "lib");
libDir.mkdirs();
for (File jar : jars) {
FileUtils.copy(jar, new File(libDir, jar.getName()));
}
// copy metadata
if (etcFiles != null) {
File etcDir = new File(serviceDir, "etc");
for (File file : etcFiles) {
FileUtils.copy(file, new File(etcDir, file.getName()));
}
}
// synchronize
synchronizeSkeleton();
// add implementation
addImplementation();
// rebuild
buildSkeleton();
}
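	/** Copy the method and constructor implementations from the test's Impl file
	 *  into the generated service implementation class.
	 */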
private void addImplementation()
throws ParserConfigurationException, SAXException, IOException
{
String targetPath = pkg.replace('.', File.separatorChar);
File targetJava = new File(serviceDir, "src" + File.separator + targetPath + File.separator + "service" + File.separator + serviceName + "Impl.java");
// add method impl
IntroduceServiceInfo info = new IntroduceServiceInfo(serviceXmlDescriptor);
for (String methodName : info.getMethodNames()) {
SourceUtils.modifyImpl(implFile, targetJava, methodName);
}
// add constructor impl
SourceUtils.modifyImpl(implFile, targetJava, serviceName);
}
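	/** Generate the service skeleton by invoking Introduce's create-service and post-create Ant tasks. */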
private void createSkeleton()
throws IOException, InterruptedException
{
// create properties
Properties sysProps = new Properties();
sysProps.setProperty("introduce.skeleton.service.name", serviceName);
sysProps.setProperty("introduce.skeleton.destination.dir", serviceDir.toString());
sysProps.setProperty("introduce.skeleton.package", pkg);
sysProps.setProperty("introduce.skeleton.package.dir", pkg.replace('.', '/'));
sysProps.setProperty("introduce.skeleton.namespace.domain", namespace);
sysProps.setProperty("introduce.skeleton.extensions", "");
for (Object key : introduceProps.keySet()) {
sysProps.setProperty((String) key, introduceProps.getProperty((String) key));
}
// invoke ant
AntUtils.runAnt(introduceDir, null, IntroduceServiceInfo.INTRODUCE_CREATESERVICE_TASK, sysProps, null);
// invoke ant
AntUtils.runAnt(introduceDir, null, "postCreateService", sysProps, null);
}
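	/** Re-synchronize the generated skeleton by invoking Introduce's resync Ant task. */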
private void synchronizeSkeleton()
throws IOException, InterruptedException
{
//String cmd = "ant -Dintroduce.skeleton.destination.dir=BasicAnalyticalService resyncService";
// create properties
Properties sysProps = new Properties();
sysProps.setProperty("introduce.skeleton.destination.dir", serviceDir.toString());
// invoke ant
AntUtils.runAnt(introduceDir, null, IntroduceServiceInfo.INTRODUCE_RESYNCSERVICE_TASK, sysProps, null);
}
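	/** Build the generated service by running the "all" target of its own Ant build. */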
private void buildSkeleton()
throws IOException, InterruptedException
{
// invoke ant
AntUtils.runAnt(serviceDir, null, "all", null, null);
}
public File getServiceDir()
{
return serviceDir;
}
public String getServiceName()
{
return serviceName;
}
}
|
package org.eclipse.birt.chart.ui.swt.composites;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import org.apache.commons.codec.binary.Base64;
import org.eclipse.birt.chart.model.attribute.EmbeddedImage;
import org.eclipse.birt.chart.model.attribute.Fill;
import org.eclipse.birt.chart.model.attribute.Image;
import org.eclipse.birt.chart.model.attribute.impl.EmbeddedImageImpl;
import org.eclipse.birt.chart.model.attribute.impl.ImageImpl;
import org.eclipse.birt.chart.ui.extension.i18n.Messages;
import org.eclipse.birt.chart.ui.util.ChartHelpContextIds;
import org.eclipse.birt.chart.ui.util.ChartUIUtil;
import org.eclipse.birt.chart.ui.util.UIHelper;
import org.eclipse.birt.core.ui.frameworks.taskwizard.WizardBase;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.dialogs.TrayDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
/**
* MarkerIconDialog is invoked when the user chooses "icon" from Marker Type
* Combo box.
*/
public class MarkerIconDialog extends TrayDialog
implements
SelectionListener,
ModifyListener
{
private transient Button btnURL;
private transient Button btnLocal;
private transient Button btnPreview;
private Composite inputArea;
private IconCanvas previewCanvas;
private transient Button btnBrowse;
final private static int URI_TYPE = 0;
final private static int LOCAL_TYPE = 1;
final private static int EMBEDDED_TYPE = 2;
private int selectedType = -1;
private Text uriEditor;
private transient Fill icon;
private Button btnEmbeddedImage;
private boolean validateURL = false;
private Label lblException;
/**
* Constructor
*
* @param parent
* shell of LineSeriesAttributeComposite
* @param iconPalette
* retrieved from LineSeries
*/
public MarkerIconDialog( Shell parent, Fill fill )
{
super( parent );
icon = null;
if ( fill != null )
{
icon = fill.copyInstance( );
}
}
protected Control createContents( Composite parent )
{
ChartUIUtil.bindHelp( parent, ChartHelpContextIds.DIALOG_MARKER_ICON );
getShell( ).setText( Messages.getString( "MarkerIconDialog.Title.MarkerIconSelector" ) ); //$NON-NLS-1$
Control c = super.createContents( parent );
// Check icon type and set UI status.
if ( icon instanceof EmbeddedImage )
{
btnEmbeddedImage.setSelection( true );
switchTo( EMBEDDED_TYPE );
}
else if ( icon instanceof Image )
{
try
{
if ( "file".equals( new URL( ( (Image) icon ).getURL( ) ).getProtocol( ) ) ) //$NON-NLS-1$
{
btnLocal.setSelection( true );
switchTo( LOCAL_TYPE );
}
}
catch ( MalformedURLException e )
{
// do nothing
}
}
c.pack( );
preview();
Point size = getShell( ).computeSize( SWT.DEFAULT, SWT.DEFAULT );
getShell( ).setSize( size.x, size.y );
UIHelper.centerOnScreen( getShell( ) );
return c;
}
protected Control createDialogArea( Composite parent )
{
Composite cmpContent = new Composite( parent, SWT.NONE );
cmpContent.setLayout( new GridLayout( ) );
createSelectionArea( cmpContent );
new Label( cmpContent, SWT.SEPARATOR | SWT.HORIZONTAL ).setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
Composite composite = new Composite( cmpContent, SWT.NONE );
composite.setLayout( new GridLayout( 2, false ) );
createListArea( composite );
createPreviewArea( composite );
new Label( cmpContent, SWT.SEPARATOR | SWT.HORIZONTAL ).setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
return cmpContent;
}
protected Control createButtonBar( Composite parent )
{
Control cmp = super.createButtonBar( parent );
updateButton( );
return cmp;
}
/**
* Selection Area locates in the top of the dialog.
*
* @param parent
* dialog composite
*/
private void createSelectionArea( Composite parent )
{
Composite composite = new Composite( parent, SWT.NONE );
composite.setLayout( new GridLayout( 2, false ) );
Label label = new Label( composite, SWT.NONE );
label.setText( Messages.getString( "MarkerIconDialog.Lbl.SelectIconFrom" ) ); //$NON-NLS-1$
label.setLayoutData( new GridData( GridData.VERTICAL_ALIGN_BEGINNING ) );
Composite selectionArea = new Composite( composite, SWT.NONE );
selectionArea.setLayout( new FillLayout( SWT.VERTICAL ) );
btnURL = new Button( selectionArea, SWT.RADIO );
btnURL.setText( Messages.getString( "MarkerIconDialog.Lbl.URL" ) ); //$NON-NLS-1$
btnURL.addSelectionListener( this );
btnLocal = new Button( selectionArea, SWT.RADIO );
btnLocal.setText( Messages.getString( "MarkerIconDialog.Lbl.Local" ) ); //$NON-NLS-1$
btnLocal.addSelectionListener( this );
btnEmbeddedImage = new Button( selectionArea, SWT.RADIO );
btnEmbeddedImage.setText( Messages.getString("MarkerIconDialog.Button.EmbeddedImage")); //$NON-NLS-1$
btnEmbeddedImage.addSelectionListener( this );
}
/**
* List Area locates in the left middle of the dialog.
*
* @param parent
* dialog composite
*/
private void createListArea( Composite parent )
{
Composite listArea = new Composite( parent, SWT.NONE );
GridData gd = new GridData( GridData.FILL_BOTH
| GridData.HORIZONTAL_ALIGN_BEGINNING );
gd.widthHint = 300;
gd.heightHint = 260;
listArea.setLayoutData( gd );
listArea.setLayout( new GridLayout( ) );
GridLayout gl = new GridLayout( );
gl.marginWidth = 0;
gl.marginHeight = 0;
		// The input area varies depending on the selection (URI, Local or Embedded).
inputArea = new Composite( listArea, SWT.NONE );
GridData gdInputArea = new GridData( GridData.FILL_BOTH
| GridData.HORIZONTAL_ALIGN_BEGINNING );
inputArea.setLayoutData( gdInputArea );
inputArea.setLayout( gl );
}
/**
* Preview Area locates in the right middle of the dialog.
*
* @param composite
* dialog composite
*/
private void createPreviewArea( Composite composite )
{
Composite previewArea = new Composite( composite, SWT.BORDER );
GridData gd = new GridData( GridData.FILL_BOTH );
gd.widthHint = 250;
gd.heightHint = 260;
previewArea.setLayoutData( gd );
previewArea.setLayout( new FillLayout( ) );
previewCanvas = new IconCanvas( previewArea );
}
/**
	 * Switch the input area according to the selected type (URI, Local or Embedded).
	 * 
	 * @param type
	 *            0: URI_TYPE; 1: LOCAL_TYPE; 2: EMBEDDED_TYPE
*/
private void switchTo( int type )
{
if ( type == selectedType )
{
			// If the selected type is the same as the current type, do nothing.
return;
}
// Clear the current Input Area contents.
selectedType = type;
Control[] controls = inputArea.getChildren( );
for ( int i = 0; i < controls.length; i++ )
{
controls[i].dispose( );
}
// Rearrange the layout and contents of Input Area.
switch ( type )
{
case URI_TYPE :
				switchToURIType( );
break;
case LOCAL_TYPE :
				switchToLocalType( );
break;
case EMBEDDED_TYPE :
switchToEmbeddedType( );
}
inputArea.layout( );
updateButton( );
}
	private void switchToURIType( )
{
Label title = new Label( inputArea, SWT.NONE );
title.setLayoutData( new GridData( GridData.VERTICAL_ALIGN_BEGINNING ) );
title.setText( Messages.getString( "MarkerIconDialog.Lbl.EnterURL" ) ); //$NON-NLS-1$
uriEditor = new Text( inputArea, SWT.SINGLE | SWT.BORDER );
uriEditor.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
uriEditor.addModifyListener( this );
Composite innerComp = new Composite( inputArea, SWT.NONE );
innerComp.setLayoutData( new GridData( GridData.HORIZONTAL_ALIGN_END ) );
GridLayout gl = new GridLayout( 2, false );
gl.marginWidth = 0;
gl.marginHeight = 0;
gl.verticalSpacing = 2;
innerComp.setLayout( gl );
btnPreview = new Button( innerComp, SWT.PUSH );
btnPreview.setText( Messages.getString( "MarkerIconDialog.Lbl.Preview" ) ); //$NON-NLS-1$
btnPreview.setLayoutData( new GridData( GridData.HORIZONTAL_ALIGN_END ) );
btnPreview.setEnabled( false );
btnPreview.addSelectionListener( this );
if ( icon != null )
{
String url = ( (Image) icon ).getURL( );
uriEditor.setText( url == null ? "" : url ); //$NON-NLS-1$
}
}
	private void switchToLocalType( )
{
Composite buttonBar = new Composite( inputArea, SWT.NONE );
GridLayout gl = new GridLayout( );
gl.marginWidth = 0;
gl.marginHeight = 0;
buttonBar.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
buttonBar.setLayout( gl );
Label description = new Label( buttonBar, SWT.WRAP );
description.setLayoutData( new GridData( GridData.FILL_HORIZONTAL
| GridData.HORIZONTAL_ALIGN_BEGINNING ) );
description.setText( Messages.getString( "MarkerIconDialog.Lbl.Description" ) ); //$NON-NLS-1$
btnBrowse = new Button( buttonBar, SWT.PUSH );
btnBrowse.setText( Messages.getString( "MarkerIconDialog.Lbl.Browse" ) ); //$NON-NLS-1$
GridData gd = new GridData( GridData.HORIZONTAL_ALIGN_END );
gd.grabExcessHorizontalSpace = true;
btnBrowse.setLayoutData( gd );
btnBrowse.addSelectionListener( this );
}
private void switchToEmbeddedType( )
{
Composite buttonBar = new Composite( inputArea, SWT.NONE );
GridLayout gl = new GridLayout( );
gl.marginWidth = 0;
gl.marginHeight = 0;
buttonBar.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
buttonBar.setLayout( gl );
Label description = new Label( buttonBar, SWT.WRAP );
description.setLayoutData( new GridData( GridData.FILL_HORIZONTAL
| GridData.HORIZONTAL_ALIGN_BEGINNING ) );
description.setText( Messages.getString( "MarkerIconDialog.Label.Description.EmbeddedImage" ) ); //$NON-NLS-1$
btnBrowse = new Button( buttonBar, SWT.PUSH );
btnBrowse.setText( Messages.getString( "MarkerIconDialog.Lbl.Browse" ) ); //$NON-NLS-1$
GridData gd = new GridData( GridData.HORIZONTAL_ALIGN_END );
gd.grabExcessHorizontalSpace = true;
btnBrowse.setLayoutData( gd );
btnBrowse.addSelectionListener( this );
}
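	/** Enable the OK button only when the current selection provides a usable image. */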
private void updateButton( )
{
		getButtonOk( ).setEnabled( ( ( selectedType == LOCAL_TYPE || selectedType == EMBEDDED_TYPE )
				&& icon != null
				&& ( (Image) icon ).getURL( ) != null )
				|| ( selectedType == URI_TYPE && validateURL ) );
}
private Button getButtonOk( )
{
return getButton( IDialogConstants.OK_ID );
}
	/**
	 * Preview the image located at the given URI and update the URL validation state.
	 * 
	 * @param uri
	 *            full image URL, e.g. a "file:///" or "http://" URI
	 */
private void preview( String uri )
{
try
{
URL url = new URL( uri );
// there's no need to enable the ok button when processing
url.getContent( );
if ( previewCanvas.loadImage( url ) != null )
{
validateURL = true;
if ( lblException != null && !lblException.isDisposed( ) )
{
lblException.setText( "" ); //$NON-NLS-1$
}
}
}
catch ( Exception e )
{
getButtonOk( ).setEnabled( false );
validateURL = false;
if ( lblException == null || lblException.isDisposed( ) )
{
lblException = new Label( inputArea, SWT.NONE );
lblException.setLayoutData( new GridData( ) );
lblException.setForeground( Display.getDefault( )
.getSystemColor( SWT.COLOR_RED ) );
}
lblException.setText( Messages.getString( "MarkerIconDialog.Exception.InvalidURL" ) );//$NON-NLS-1$
inputArea.layout( );
}
}
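	/** Preview the currently selected icon, either from its embedded data or from its URL. */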
private void preview( )
{
if ( icon == null )
{
return;
}
if ( icon instanceof EmbeddedImage )
{
try
{
byte[] data = Base64.decodeBase64( ( (EmbeddedImage) icon ).getData( )
.getBytes( ) );
getButtonOk( ).setEnabled( true );
ByteArrayInputStream bais = new ByteArrayInputStream( data );
BufferedInputStream bis = new BufferedInputStream( bais );
previewCanvas.loadImage( bis );
bis.close( );
}
catch ( Exception e )
{
getButtonOk( ).setEnabled( false );
WizardBase.displayException( e );
}
}
else if ( icon instanceof Image )
{
preview( ((Image)icon).getURL( ) );
}
}
	/**
	 * For the URI type, (re)create the icon from the URI editor text if no icon
	 * exists yet or its URL has changed.
	 */
private void checkIcon( )
{
if ( selectedType == URI_TYPE )
{
if ( icon == null
|| ( (Image) icon ).getURL( ) != null
&& !( (Image) icon ).getURL( )
.equals( trimString( uriEditor.getText( ) ) ) )
{
icon = ImageImpl.create( trimString( uriEditor.getText( ) ) );
}
}
}
protected void okPressed( )
{
checkIcon( );
super.okPressed( );
}
/*
* (non-Javadoc)
*
* @see org.eclipse.swt.events.SelectionListener#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*/
public void widgetSelected( SelectionEvent e )
{
if ( e.widget.equals( btnURL ) )
{
switchTo( URI_TYPE );
if ( icon instanceof EmbeddedImage )
{
previewCanvas.clear( );
icon = null;
}
}
else if ( e.widget.equals( btnLocal ) )
{
switchTo( LOCAL_TYPE );
if ( icon instanceof EmbeddedImage )
{
previewCanvas.clear( );
icon = null;
}
}
else if ( e.widget.equals( btnEmbeddedImage ) )
{
boolean modified = ( selectedType != EMBEDDED_TYPE );
switchTo( EMBEDDED_TYPE );
if ( modified && icon instanceof EmbeddedImage )
{
try
{
preview();
}
catch ( Exception ex )
{
WizardBase.displayException( ex );
}
}
else
{
previewCanvas.clear( );
icon = null;
}
}
else if ( e.widget.equals( btnPreview ) )
{
uriEditor.setText( uriEditor.getText( ).trim( ) );
String path = uriEditor.getText( );
preview( path );
}
else if ( e.widget.equals( btnBrowse ) )
{
FileDialog fileChooser = new FileDialog( getShell( ), SWT.OPEN );
fileChooser.setText( Messages.getString( "MarkerIconDialog.Chooser.Title" ) ); //$NON-NLS-1$
fileChooser.setFilterExtensions( new String[]{
"*.gif", "*.jpg", "*.png" //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
} );
try
{
String path = fileChooser.open( );
if ( path != null )
{
path = new StringBuffer( "file:///" ).append( path ).toString( ); //$NON-NLS-1$
preview( path );
if ( selectedType == EMBEDDED_TYPE )
{
setEmbeddedIcon( path );
}
else
{
icon = ImageImpl.create( path );
}
}
}
catch ( Throwable ex )
{
ex.printStackTrace( );
}
}
updateButton( );
}
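	/** Read the image at the given URL and store it as a Base64-encoded embedded image. */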
private void setEmbeddedIcon( String path ) throws IOException,
MalformedURLException
{
BufferedInputStream bis = new BufferedInputStream( new URL( path ).openStream( ) );
ByteArrayOutputStream bos = new ByteArrayOutputStream( );
byte[] buf = new byte[1024];
int count = bis.read( buf );
while ( count != -1 )
{
bos.write( buf, 0, count );
count = bis.read( buf );
}
if ( bis != null )
{
bis.close( );
}
String data = new String( Base64.encodeBase64( bos.toByteArray( ) ) );
icon = EmbeddedImageImpl.create( path,
data );
}
/*
* (non-Javadoc)
*
* @see org.eclipse.swt.events.SelectionListener#widgetDefaultSelected(org.eclipse.swt.events.SelectionEvent)
*/
public void widgetDefaultSelected( SelectionEvent event )
{
}
/*
* (non-Javadoc)
*
* @see org.eclipse.swt.events.ModifyListener#modifyText(org.eclipse.swt.events.ModifyEvent)
*/
public void modifyText( ModifyEvent e )
{
if ( e.widget.equals( uriEditor ) )
{
btnPreview.setEnabled( trimString( uriEditor.getText( ) ) != null );
getButtonOk( ).setEnabled( false );
}
}
/**
* Trim a string. Removes leading and trailing blanks. If the resulting
	 * string is empty, normalizes it to null.
*
* @param value
* the string to trim
* @return the trimmed string, or null if the string is empty
*/
private static String trimString( String value )
{
if ( value == null )
{
return null;
}
value = value.trim( );
if ( value.length( ) == 0 )
{
return null;
}
return value;
}
	/**
	 * @return Returns the selected icon fill for the line series.
	 */
public Fill getFill( )
{
return icon;
}
}
|
package org.eclipse.birt.chart.ui.swt.composites;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import org.apache.commons.codec.binary.Base64;
import org.eclipse.birt.chart.model.attribute.EmbeddedImage;
import org.eclipse.birt.chart.model.attribute.Fill;
import org.eclipse.birt.chart.model.attribute.Image;
import org.eclipse.birt.chart.model.attribute.impl.EmbeddedImageImpl;
import org.eclipse.birt.chart.model.attribute.impl.ImageImpl;
import org.eclipse.birt.chart.ui.extension.i18n.Messages;
import org.eclipse.birt.chart.ui.swt.wizard.ChartWizardContext;
import org.eclipse.birt.chart.ui.util.ChartHelpContextIds;
import org.eclipse.birt.chart.ui.util.ChartUIUtil;
import org.eclipse.birt.chart.ui.util.UIHelper;
import org.eclipse.birt.core.ui.frameworks.taskwizard.WizardBase;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.dialogs.TrayDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.FillLayout;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.MessageBox;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Text;
/**
* MarkerIconDialog is invoked when the user chooses "icon" from Marker Type
* Combo box.
*/
public class MarkerIconDialog extends TrayDialog
implements
SelectionListener,
ModifyListener
{
protected transient Button btnURL;
protected transient Button btnLocal;
private transient Button btnPreview;
private Composite inputArea;
private IconCanvas previewCanvas;
private transient Button btnBrowse;
final private static int URI_TYPE = 0;
final private static int LOCAL_TYPE = 1;
final private static int EMBEDDED_TYPE = 2;
private int selectedType = -1;
private Text uriEditor;
private transient Fill icon;
protected Button btnEmbeddedImage;
private Label lblException;
protected ChartWizardContext context;
private boolean hasUriImagePreviewed=false;
private ImageStatus urlImagePreviewStat;
private static enum ImageStatus{
IMAGE_URL_INVALID,
IMAGE_CANNOT_DISPLAY,
IMAGE_CAN_DISPLAY
}
/**
* Constructor
*
* @param parent
* shell of LineSeriesAttributeComposite
* @param iconPalette
* retrieved from LineSeries
*/
public MarkerIconDialog( Shell parent, Fill fill, ChartWizardContext context )
{
super( parent );
icon = null;
if ( fill != null )
{
icon = fill.copyInstance( );
}
this.context = context;
}
protected Control createContents( Composite parent )
{
ChartUIUtil.bindHelp( parent, ChartHelpContextIds.DIALOG_MARKER_ICON );
getShell( ).setText( Messages.getString( "MarkerIconDialog.Title.MarkerIconSelector" ) ); //$NON-NLS-1$
Control c = super.createContents( parent );
// Check icon type and set UI status.
if ( btnEmbeddedImage != null && icon instanceof EmbeddedImage )
{
btnEmbeddedImage.setSelection( true );
switchTo( EMBEDDED_TYPE );
}
else if ( icon instanceof Image )
{
try
{
if ( btnLocal != null
&& "file".equals( new URL( ( (Image) icon ).getURL( ) ).getProtocol( ) ) ) //$NON-NLS-1$
{
btnLocal.setSelection( true );
switchTo( LOCAL_TYPE );
}
}
catch ( MalformedURLException e )
{
// do nothing
}
}
c.pack( );
preview();
Point size = getShell( ).computeSize( SWT.DEFAULT, SWT.DEFAULT );
getShell( ).setSize( size.x, size.y );
UIHelper.centerOnScreen( getShell( ) );
return c;
}
protected Control createDialogArea( Composite parent )
{
Composite cmpContent = new Composite( parent, SWT.NONE );
cmpContent.setLayout( new GridLayout( ) );
createSelectionArea( cmpContent );
new Label( cmpContent, SWT.SEPARATOR | SWT.HORIZONTAL ).setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
Composite composite = new Composite( cmpContent, SWT.NONE );
composite.setLayout( new GridLayout( 2, false ) );
createListArea( composite );
createPreviewArea( composite );
new Label( cmpContent, SWT.SEPARATOR | SWT.HORIZONTAL ).setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
return cmpContent;
}
protected Control createButtonBar( Composite parent )
{
Control cmp = super.createButtonBar( parent );
updateButton( );
return cmp;
}
/**
* Selection Area locates in the top of the dialog.
*
* @param parent
* dialog composite
*/
protected void createSelectionArea( Composite parent )
{
Composite composite = new Composite( parent, SWT.NONE );
composite.setLayout( new GridLayout( 2, false ) );
Label label = new Label( composite, SWT.NONE );
label.setText( Messages.getString( "MarkerIconDialog.Lbl.SelectIconFrom" ) ); //$NON-NLS-1$
label.setLayoutData( new GridData( GridData.VERTICAL_ALIGN_BEGINNING ) );
Composite selectionArea = new Composite( composite, SWT.NONE );
selectionArea.setLayout( new FillLayout( SWT.VERTICAL ) );
btnURL = new Button( selectionArea, SWT.RADIO );
btnURL.setText( Messages.getString( "MarkerIconDialog.Lbl.URL" ) ); //$NON-NLS-1$
btnURL.addSelectionListener( this );
btnLocal = new Button( selectionArea, SWT.RADIO );
btnLocal.setText( Messages.getString( "MarkerIconDialog.Lbl.Local" ) ); //$NON-NLS-1$
btnLocal.addSelectionListener( this );
btnEmbeddedImage = new Button( selectionArea, SWT.RADIO );
btnEmbeddedImage.setText( Messages.getString("MarkerIconDialog.Button.EmbeddedImage")); //$NON-NLS-1$
btnEmbeddedImage.addSelectionListener( this );
}
/**
* List Area locates in the left middle of the dialog.
*
* @param parent
* dialog composite
*/
private void createListArea( Composite parent )
{
Composite listArea = new Composite( parent, SWT.NONE );
GridData gd = new GridData( GridData.FILL_BOTH
| GridData.HORIZONTAL_ALIGN_BEGINNING );
gd.widthHint = 300;
gd.heightHint = 260;
listArea.setLayoutData( gd );
listArea.setLayout( new GridLayout( ) );
GridLayout gl = new GridLayout( );
gl.marginWidth = 0;
gl.marginHeight = 0;
		// The input area varies depending on the selection (URI, Local or Embedded).
inputArea = new Composite( listArea, SWT.NONE );
GridData gdInputArea = new GridData( GridData.FILL_BOTH
| GridData.HORIZONTAL_ALIGN_BEGINNING );
inputArea.setLayoutData( gdInputArea );
inputArea.setLayout( gl );
}
/**
* Preview Area locates in the right middle of the dialog.
*
* @param composite
* dialog composite
*/
private void createPreviewArea( Composite composite )
{
Composite previewArea = new Composite( composite, SWT.BORDER );
GridData gd = new GridData( GridData.FILL_BOTH );
gd.widthHint = 250;
gd.heightHint = 260;
previewArea.setLayoutData( gd );
previewArea.setLayout( new FillLayout( ) );
previewCanvas = new IconCanvas( previewArea );
}
/**
	 * Switch the input area according to the selected type (URI, Local or Embedded).
	 * 
	 * @param type
	 *            0: URI_TYPE; 1: LOCAL_TYPE; 2: EMBEDDED_TYPE
*/
private void switchTo( int type )
{
if ( type == selectedType )
{
			// If the selected type is the same as the current type, do nothing.
return;
}
// Clear the current Input Area contents.
selectedType = type;
Control[] controls = inputArea.getChildren( );
for ( int i = 0; i < controls.length; i++ )
{
controls[i].dispose( );
}
// Rearrange the layout and contents of Input Area.
switch ( type )
{
case URI_TYPE :
				switchToURIType( );
break;
case LOCAL_TYPE :
				switchToLocalType( );
break;
case EMBEDDED_TYPE :
switchToEmbeddedType( );
}
inputArea.layout( );
updateButton( );
}
	private void switchToURIType( )
{
Label title = new Label( inputArea, SWT.NONE );
title.setLayoutData( new GridData( GridData.VERTICAL_ALIGN_BEGINNING ) );
title.setText( Messages.getString( "MarkerIconDialog.Lbl.EnterURL" ) ); //$NON-NLS-1$
uriEditor = new Text( inputArea, SWT.SINGLE | SWT.BORDER );
uriEditor.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
uriEditor.addModifyListener( this );
Composite innerComp = new Composite( inputArea, SWT.NONE );
innerComp.setLayoutData( new GridData( GridData.HORIZONTAL_ALIGN_END ) );
GridLayout gl = new GridLayout( 2, false );
gl.marginWidth = 0;
gl.marginHeight = 0;
gl.verticalSpacing = 2;
innerComp.setLayout( gl );
btnPreview = new Button( innerComp, SWT.PUSH );
btnPreview.setText( Messages.getString( "MarkerIconDialog.Lbl.Preview" ) ); //$NON-NLS-1$
btnPreview.setLayoutData( new GridData( GridData.HORIZONTAL_ALIGN_END ) );
btnPreview.setEnabled( false );
btnPreview.addSelectionListener( this );
if ( icon != null )
{
String url = ( (Image) icon ).getURL( );
uriEditor.setText( url == null ? "" : url ); //$NON-NLS-1$
}
}
	private void switchToLocalType( )
{
Composite buttonBar = new Composite( inputArea, SWT.NONE );
GridLayout gl = new GridLayout( );
gl.marginWidth = 0;
gl.marginHeight = 0;
buttonBar.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
buttonBar.setLayout( gl );
Label description = new Label( buttonBar, SWT.WRAP );
description.setLayoutData( new GridData( GridData.FILL_HORIZONTAL
| GridData.HORIZONTAL_ALIGN_BEGINNING ) );
description.setText( Messages.getString( "MarkerIconDialog.Lbl.Description" ) ); //$NON-NLS-1$
btnBrowse = new Button( buttonBar, SWT.PUSH );
btnBrowse.setText( Messages.getString( "MarkerIconDialog.Lbl.Browse" ) ); //$NON-NLS-1$
GridData gd = new GridData( GridData.HORIZONTAL_ALIGN_END );
gd.grabExcessHorizontalSpace = true;
btnBrowse.setLayoutData( gd );
btnBrowse.addSelectionListener( this );
}
private void switchToEmbeddedType( )
{
Composite buttonBar = new Composite( inputArea, SWT.NONE );
GridLayout gl = new GridLayout( );
gl.marginWidth = 0;
gl.marginHeight = 0;
buttonBar.setLayoutData( new GridData( GridData.FILL_HORIZONTAL ) );
buttonBar.setLayout( gl );
Label description = new Label( buttonBar, SWT.NONE );
description.setLayoutData( new GridData( GridData.HORIZONTAL_ALIGN_BEGINNING ) );
description.setText( Messages.getString( "MarkerIconDialog.Label.Description.EmbeddedImage" ) ); //$NON-NLS-1$
btnBrowse = new Button( buttonBar, SWT.PUSH );
btnBrowse.setText( Messages.getString( "MarkerIconDialog.Lbl.Browse" ) ); //$NON-NLS-1$
GridData gd = new GridData( GridData.HORIZONTAL_ALIGN_END );
gd.grabExcessHorizontalSpace = true;
btnBrowse.setLayoutData( gd );
btnBrowse.addSelectionListener( this );
}
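	/** Enable the OK button only when the current selection provides a usable image. */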
private void updateButton( )
{
		// Local / embedded case
		boolean localAndEmbeddedTypeValid = ( selectedType == LOCAL_TYPE || selectedType == EMBEDDED_TYPE )
				&& icon != null
				&& ( (Image) icon ).getURL( ) != null;
		// URI case
		boolean isUriTextEmpty = ( uriEditor == null )
				|| uriEditor.isDisposed( )
				|| ( trimString( uriEditor.getText( ) ) == null );
		boolean uriTypeValid = ( selectedType == URI_TYPE )
				&& !isUriTextEmpty;
		getButtonOk( ).setEnabled( localAndEmbeddedTypeValid || uriTypeValid );
}
private Button getButtonOk( )
{
return getButton( IDialogConstants.OK_ID );
}
	/**
	 * Preview the image located at the given URI and report whether it can be displayed.
	 * 
	 * @param uri
	 *            full image URL, e.g. a "file:///" or "http://" URI
	 * @return the preview status of the image
	 */
private ImageStatus preview( String uri )
{
hasUriImagePreviewed=true;
try
{
URL url = new URL( uri );
// there's no need to enable the ok button when processing
url.getContent( );
if ( previewCanvas.loadImage( url ) != null )
{
emptyExceptionText();
}
return ImageStatus.IMAGE_CAN_DISPLAY;
}
catch (MalformedURLException malEx){
showMessage(Messages.getString( "MarkerIconDialog.Exception.InvalidURL" ));//$NON-NLS-1$
return ImageStatus.IMAGE_URL_INVALID;
}
catch ( Exception ex )
{
showMessage(Messages.getString("MarkerIconDialog.Exception.ImageNotAvailable")); //$NON-NLS-1$
return ImageStatus.IMAGE_CANNOT_DISPLAY;
}
}
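	/** Show an error message below the input area, lazily creating the red exception label if needed. */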
private void showMessage(String text){
if ( lblException == null || lblException.isDisposed( ) )
{
lblException = new Label( inputArea, SWT.NONE );
lblException.setLayoutData( new GridData( ) );
lblException.setForeground( Display.getDefault( )
.getSystemColor( SWT.COLOR_RED ) );
}
lblException.setText(text );
inputArea.layout( );
}
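	/** Preview the currently selected icon, either from its embedded data or from its URL. */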
private void preview( )
{
if ( icon == null )
{
return;
}
if ( icon instanceof EmbeddedImage )
{
try
{
byte[] data = Base64.decodeBase64( ( (EmbeddedImage) icon ).getData( )
.getBytes( ) );
getButtonOk( ).setEnabled( true );
ByteArrayInputStream bais = new ByteArrayInputStream( data );
BufferedInputStream bis = new BufferedInputStream( bais );
previewCanvas.loadImage( bis );
bis.close( );
}
catch ( Exception e )
{
getButtonOk( ).setEnabled( false );
WizardBase.displayException( e );
}
}
else if ( icon instanceof Image )
{
urlImagePreviewStat=preview( ((Image)icon).getURL( ) );
}
}
	/**
	 * For the URI type, validate the entered image URL (warning the user if the
	 * image cannot be displayed) and (re)create the icon if its URL has changed.
	 * 
	 * @return false if the URL is invalid or the user cancelled, true otherwise
	 */
private boolean checkIcon( )
{
if ( selectedType == URI_TYPE )
{
// load image to see if can display normally;
String uri = trimString( uriEditor.getText( ) );
ImageStatus result;
			// If the image has been previewed before, then use the cached preview result
if ( urlImagePreviewStat != null && hasUriImagePreviewed )
{
result = urlImagePreviewStat;
}
else
{
urlImagePreviewStat = preview( uri );
result = urlImagePreviewStat;
}
uriEditor.setText( uri );
hasUriImagePreviewed=true;
switch ( result )
{
case IMAGE_CANNOT_DISPLAY :
MessageBox mb = new MessageBox( Display.getDefault( )
.getActiveShell( ), SWT.ICON_WARNING
| SWT.OK
| SWT.CANCEL );
mb.setText( Messages.getString( "MarkerIconDialog.ImageNotAvailableWarning" ) ); //$NON-NLS-1$
mb.setMessage( Messages.getString( "MarkerIconDialog.Exception.ImageNotAvailable" ) //$NON-NLS-1$
+ " " + Messages.getString( "MarkerIconDialog.ImageNotAvailableWarningMessage" ) ); //$NON-NLS-1$ //$NON-NLS-2$
int messageResult = mb.open( );
if ( messageResult != SWT.OK )
{
return false;
}
break;
case IMAGE_URL_INVALID :
return false;
case IMAGE_CAN_DISPLAY :
break;
default:
return false;
}
if ( icon == null
|| ( (Image) icon ).getURL( ) != null
&& !( (Image) icon ).getURL( ).equals( uri ) )
{
icon = ImageImpl.create( uri );
}
}
return true;
}
protected void okPressed( )
{
if(!checkIcon( )){
return;
}
super.okPressed( );
}
/*
* (non-Javadoc)
*
* @see org.eclipse.swt.events.SelectionListener#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*/
public void widgetSelected( SelectionEvent e )
{
if ( e.widget.equals( btnURL ) )
{
switchTo( URI_TYPE );
if ( icon instanceof EmbeddedImage )
{
previewCanvas.clear( );
icon = null;
}
}
else if ( e.widget.equals( btnLocal ) )
{
switchTo( LOCAL_TYPE );
if ( icon instanceof EmbeddedImage )
{
previewCanvas.clear( );
icon = null;
}
}
else if ( e.widget.equals( btnEmbeddedImage ) )
{
boolean modified = ( selectedType != EMBEDDED_TYPE );
switchTo( EMBEDDED_TYPE );
if ( modified && icon instanceof EmbeddedImage )
{
try
{
preview();
}
catch ( Exception ex )
{
WizardBase.displayException( ex );
}
}
else
{
previewCanvas.clear( );
icon = null;
}
}
else if ( e.widget.equals( btnPreview ) )
{
uriEditor.setText( uriEditor.getText( ).trim( ) );
String path = uriEditor.getText( );
urlImagePreviewStat=preview( path );
}
else if ( e.widget.equals( btnBrowse ) )
{
FileDialog fileChooser = new FileDialog( getShell( ), SWT.OPEN );
fileChooser.setText( Messages.getString( "MarkerIconDialog.Chooser.Title" ) ); //$NON-NLS-1$
fileChooser.setFilterExtensions( new String[]{
"*.gif", "*.jpg", "*.png" //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
} );
try
{
String path = fileChooser.open( );
if ( path != null )
{
path = new StringBuffer( "file:///" ).append( path ).toString( ); //$NON-NLS-1$
preview( path );
if ( selectedType == EMBEDDED_TYPE )
{
setEmbeddedIcon( path );
}
else
{
icon = ImageImpl.create( path );
}
}
}
catch ( Throwable ex )
{
ex.printStackTrace( );
}
}
updateButton( );
}
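	/** Read the image at the given URL and store it as a Base64-encoded embedded image. */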
private void setEmbeddedIcon( String path ) throws IOException,
MalformedURLException
{
BufferedInputStream bis = new BufferedInputStream( new URL( path ).openStream( ) );
ByteArrayOutputStream bos = new ByteArrayOutputStream( );
byte[] buf = new byte[1024];
int count = bis.read( buf );
while ( count != -1 )
{
bos.write( buf, 0, count );
count = bis.read( buf );
}
if ( bis != null )
{
bis.close( );
}
String data = new String( Base64.encodeBase64( bos.toByteArray( ) ) );
icon = EmbeddedImageImpl.create( path,
data );
}
/*
* (non-Javadoc)
*
* @see org.eclipse.swt.events.SelectionListener#widgetDefaultSelected(org.eclipse.swt.events.SelectionEvent)
*/
public void widgetDefaultSelected( SelectionEvent event )
{
}
/*
* (non-Javadoc)
*
* @see org.eclipse.swt.events.ModifyListener#modifyText(org.eclipse.swt.events.ModifyEvent)
*/
public void modifyText( ModifyEvent e )
{
if ( e.widget.equals( uriEditor ) )
{
			// Enable Preview and OK only when the URI editor contains non-blank text
			boolean hasText = trimString( uriEditor.getText( ) ) != null;
			btnPreview.setEnabled( hasText );
			getButtonOk( ).setEnabled( hasText );
hasUriImagePreviewed=false;
}
}
private void emptyExceptionText(){
if ( lblException != null && !lblException.isDisposed( ) )
{
lblException.setText( "" ); //$NON-NLS-1$
}
}
/**
* Trim a string. Removes leading and trailing blanks. If the resulting
	 * string is empty, normalizes it to null.
*
* @param value
* the string to trim
* @return the trimmed string, or null if the string is empty
*/
private static String trimString( String value )
{
if ( value == null )
{
return null;
}
value = value.trim( );
if ( value.length( ) == 0 )
{
return null;
}
return value;
}
	/**
	 * @return Returns the selected icon fill for the line series.
	 */
public Fill getFill( )
{
return icon;
}
}
|
package edu.duke.cabig.c3pr.web.registration;
import java.util.Date;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.springframework.validation.BindException;
import org.springframework.web.servlet.ModelAndView;
import org.springframework.web.util.WebUtils;
import edu.duke.cabig.c3pr.domain.Epoch;
import edu.duke.cabig.c3pr.domain.ScheduledEpoch;
import edu.duke.cabig.c3pr.domain.StudySubject;
import edu.duke.cabig.c3pr.utils.DateUtil;
import edu.duke.cabig.c3pr.utils.StringUtils;
import edu.duke.cabig.c3pr.utils.web.ControllerTools;
import edu.duke.cabig.c3pr.web.registration.tabs.AssignArmTab;
import edu.duke.cabig.c3pr.web.registration.tabs.CompanionRegistrationTab;
import edu.duke.cabig.c3pr.web.registration.tabs.EligibilityCriteriaTab;
import edu.duke.cabig.c3pr.web.registration.tabs.EnrollmentDetailsTab;
import edu.duke.cabig.c3pr.web.registration.tabs.ReviewSubmitTab;
import edu.duke.cabig.c3pr.web.registration.tabs.StratificationTab;
import gov.nih.nci.cabig.ctms.web.tabs.Flow;
/**
* @author Ramakrishna
*
*/
public class TransferEpochRegistrationController<C extends StudySubjectWrapper> extends RegistrationController<C> {
/**
* Logger for this class
*/
private static final Logger logger = Logger.getLogger(TransferEpochRegistrationController.class);
public TransferEpochRegistrationController() {
super("Change Epoch");
}
@Override
protected void intializeFlows(Flow flow) {
flow.addTab(new EnrollmentDetailsTab());
flow.addTab(new EligibilityCriteriaTab());
flow.addTab(new StratificationTab());
flow.addTab(new AssignArmTab());
flow.addTab(new CompanionRegistrationTab());
flow.addTab(new ReviewSubmitTab());
setFlow(flow);
}
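    /**
     * Builds the command object: binds the optional off-epoch date/reason request parameters and,
     * when an "epoch" parameter is present, attaches a new ScheduledEpoch for the selected epoch.
     */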
@Override
protected Object formBackingObject(HttpServletRequest request)
throws Exception {
StudySubjectWrapper wrapper= (StudySubjectWrapper)super.formBackingObject(request);
ScheduledEpoch scheduledEpoch;
if (WebUtils.hasSubmitParameter(request, "studySubject.scheduledEpoch.offEpochDate") &&
                !StringUtils.isBlank(request.getParameter("studySubject.scheduledEpoch.offEpochDate"))){
Date offEpochDate;
offEpochDate = DateUtil.getUtilDateFromString(request.getParameter("studySubject.scheduledEpoch.offEpochDate"),"mm/dd/yyyy");
wrapper.getStudySubject().getScheduledEpoch().setOffEpochDate(offEpochDate);
}
if (WebUtils.hasSubmitParameter(request, "studySubject.scheduledEpoch.offEpochReasonText") &&
!StringUtils.isBlank(request.getParameter("studySubject.scheduledEpoch.offEpochReasonText"))){
String offEpochReasonText = request.getParameter("studySubject.scheduledEpoch.offEpochReasonText");
wrapper.getStudySubject().getScheduledEpoch().setOffEpochReasonText(offEpochReasonText);
}
if(WebUtils.hasSubmitParameter(request, "epoch")){
Integer id = Integer.parseInt(request.getParameter("epoch"));
Epoch epoch = epochDao.getById(id);
epochDao.initialize(epoch);
if (epoch.getTreatmentIndicator()) {
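                // Touch the arms collection (size is ignored), presumably to force lazy initialization for treatment epochs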
(epoch).getArms().size();
scheduledEpoch = new ScheduledEpoch();
}
else {
scheduledEpoch = new ScheduledEpoch();
}
scheduledEpoch.setEpoch(epoch);
wrapper.getStudySubject().addScheduledEpoch(scheduledEpoch);
registrationControllerUtils.buildCommandObject(wrapper.getStudySubject());
}
return wrapper;
}
@Override
protected void onBindOnNewForm(HttpServletRequest request, Object command)
throws Exception {
// TODO Auto-generated method stub
super.onBindOnNewForm(request, command);
}
@Override
protected ModelAndView processFinish(HttpServletRequest request, HttpServletResponse response,
Object command, BindException errors) throws Exception {
StudySubjectWrapper wrapper = (StudySubjectWrapper) command;
StudySubject studySubject = wrapper.getStudySubject();
if(wrapper.getShouldTransfer())
studySubject = studySubjectRepository.transferSubject(studySubject);
else if(wrapper.getShouldEnroll()){
studySubject=studySubjectRepository.enroll(studySubject);
}else if(wrapper.getShouldRegister()){
studySubject=studySubjectRepository.register(studySubject.getIdentifiers());
}else if(wrapper.getShouldReserve()){
studySubject=studySubjectRepository.reserve(studySubject.getIdentifiers());
}else{
studySubject=studySubjectRepository.save(studySubject);
}
if (logger.isDebugEnabled()) {
logger.debug("processFinish(HttpServletRequest, HttpServletResponse, Object, BindException) - registration service call over"); //$NON-NLS-1$
}
if(WebUtils.hasSubmitParameter(request, "decorator") && "noheaderDecorator".equals(request.getParameter("decorator"))){
return new ModelAndView("redirect:confirm?" + ControllerTools.createParameterString(studySubject.getSystemAssignedIdentifiers().get(0))+"&decorator=" + request.getParameter("decorator"));
}else{
return new ModelAndView("redirect:confirm?" + ControllerTools.createParameterString(studySubject.getSystemAssignedIdentifiers().get(0)));
}
}
}
|
package com.archimatetool.editor.diagram.policies;
import java.util.List;
import org.eclipse.draw2d.IFigure;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.gef.GraphicalEditPart;
import org.eclipse.gef.Request;
import org.eclipse.gef.RequestConstants;
import org.eclipse.gef.commands.Command;
import org.eclipse.gef.commands.CompoundCommand;
import org.eclipse.gef.editpolicies.GraphicalNodeEditPolicy;
import org.eclipse.gef.requests.CreateConnectionRequest;
import org.eclipse.gef.requests.ReconnectRequest;
import com.archimatetool.editor.diagram.commands.CreateDiagramArchimateConnectionWithDialogCommand;
import com.archimatetool.editor.diagram.commands.CreateDiagramConnectionCommand;
import com.archimatetool.editor.diagram.commands.DiagramCommandFactory;
import com.archimatetool.editor.diagram.commands.ReconnectDiagramConnectionCommand;
import com.archimatetool.editor.diagram.figures.ITargetFeedbackFigure;
import com.archimatetool.editor.model.DiagramModelUtils;
import com.archimatetool.model.IArchimateConcept;
import com.archimatetool.model.IArchimatePackage;
import com.archimatetool.model.IArchimateRelationship;
import com.archimatetool.model.IConnectable;
import com.archimatetool.model.IDiagramModelArchimateComponent;
import com.archimatetool.model.IDiagramModelArchimateConnection;
import com.archimatetool.model.IDiagramModelConnection;
import com.archimatetool.model.IDiagramModelGroup;
import com.archimatetool.model.IDiagramModelNote;
import com.archimatetool.model.IDiagramModelReference;
import com.archimatetool.model.util.ArchimateModelUtils;
/**
* Archimate Diagram Connection Policy
*
* @author Phillip Beauvoir
*/
public class ArchimateDiagramConnectionPolicy extends GraphicalNodeEditPolicy {
@Override
protected Command getConnectionCreateCommand(CreateConnectionRequest request) {
CreateDiagramConnectionCommand cmd = null;
EClass classType = (EClass)request.getNewObjectType();
IConnectable source = (IConnectable)getHost().getModel();
// Plain Connection
if(classType == IArchimatePackage.eINSTANCE.getDiagramModelConnection()) {
if(isValidConnectionSource(source, classType)) {
cmd = new CreateDiagramConnectionCommand(request);
}
}
// Archimate Model Component Source
else if(source instanceof IDiagramModelArchimateComponent) {
if(isValidConnectionSource(source, classType)) {
cmd = new CreateDiagramArchimateConnectionWithDialogCommand(request);
}
}
if(cmd != null) {
cmd.setSource(source);
request.setStartCommand(cmd);
}
return cmd;
}
@Override
protected Command getConnectionCompleteCommand(CreateConnectionRequest request) {
IConnectable source = (IConnectable)request.getSourceEditPart().getModel();
IConnectable target = (IConnectable)getHost().getModel();
EClass relationshipType = (EClass)request.getNewObjectType();
CreateDiagramConnectionCommand cmd = null;
if(isValidConnection(source, target, relationshipType)) {
// Pick up the command that was created in getConnectionCreateCommand(CreateConnectionRequest request)
cmd = (CreateDiagramConnectionCommand)request.getStartCommand();
cmd.setTarget(target);
}
return cmd;
}
@Override
protected Command getReconnectSourceCommand(ReconnectRequest request) {
return getReconnectCommand(request, true);
}
@Override
protected Command getReconnectTargetCommand(ReconnectRequest request) {
return getReconnectCommand(request, false);
}
/**
* Create a ReconnectCommand
*/
protected Command getReconnectCommand(ReconnectRequest request, boolean isSourceCommand) {
IDiagramModelConnection connection = (IDiagramModelConnection)request.getConnectionEditPart().getModel();
// The re-connected end component
IConnectable newComponent = (IConnectable)getHost().getModel();
// Get the type of connection (plain) or relationship (if archimate connection) and check if it is valid
EClass type = connection.eClass();
if(connection instanceof IDiagramModelArchimateConnection) {
type = ((IDiagramModelArchimateConnection)connection).getArchimateRelationship().eClass();
}
if(isSourceCommand) {
if(!isValidConnection(newComponent, connection.getTarget(), type)) {
return null;
}
}
else {
if(!isValidConnection(connection.getSource(), newComponent, type)) {
return null;
}
}
// Archimate type reconnection
if(connection instanceof IDiagramModelArchimateConnection && newComponent instanceof IDiagramModelArchimateComponent) {
return createArchimateReconnectCommand((IDiagramModelArchimateConnection)connection, (IDiagramModelArchimateComponent)newComponent, isSourceCommand);
}
// Plain reconnection
return createReconnectCommand(connection, newComponent, isSourceCommand);
}
private Command createArchimateReconnectCommand(IDiagramModelArchimateConnection connection, IDiagramModelArchimateComponent dmc, boolean isSourceCommand) {
IArchimateRelationship relationship = connection.getArchimateRelationship();
IArchimateConcept newConcept = dmc.getArchimateConcept();
CompoundCommand cmd = new CompoundCommand() {
@Override
public void execute() {
// Lazily create commands
createCommands();
super.execute();
}
@Override
public boolean canExecute() {
// Can't reconnect to the same dmc
return isSourceCommand ? connection.getSource() != dmc : connection.getTarget() != dmc;
}
// Add commands for all instances of diagram connections
private void createCommands() {
for(IDiagramModelArchimateConnection matchingConnection : relationship.getReferencingDiagramConnections()) {
// The same diagram
if(matchingConnection.getDiagramModel() == connection.getDiagramModel()) {
// If we are reconnecting to a dmc with a different concept then reconnect all instances on this diagram
if(isNewConnection(matchingConnection, dmc, isSourceCommand)) {
add(createReconnectCommand(matchingConnection, dmc, isSourceCommand));
}
// Else if we are reconnecting to a dmc with the same concept then reconnect only this one instance of the connection
else if(connection == matchingConnection) {
add(createReconnectCommand(matchingConnection, dmc, isSourceCommand));
}
}
// A different diagram
else {
// Does the new target concept exist on the diagram?
List<IDiagramModelArchimateComponent> list = DiagramModelUtils.findDiagramModelComponentsForArchimateConcept(matchingConnection.getDiagramModel(), newConcept);
// Yes, so reconnect to it *if* it is different than the existing concept
if(!list.isEmpty()) {
// Get the first instance of the new component
IDiagramModelArchimateComponent newComponent = list.get(0);
// If the instance's concept is different than the original concept then reconnect
if(isNewConnection(matchingConnection, newComponent, isSourceCommand)) {
add(createReconnectCommand(matchingConnection, newComponent, isSourceCommand));
}
}
// No, so delete the matching connection
else {
add(DiagramCommandFactory.createDeleteDiagramConnectionCommand(matchingConnection));
}
}
}
}
};
return cmd;
}
/**
* Return true if the concept in dmc is not already connected to the concept in connection
*/
private boolean isNewConnection(IDiagramModelArchimateConnection connection, IDiagramModelArchimateComponent dmc, boolean isSourceCommand) {
if(isSourceCommand) {
return !dmc.getArchimateConcept().getSourceRelationships().contains(connection.getArchimateConcept());
}
else {
return !dmc.getArchimateConcept().getTargetRelationships().contains(connection.getArchimateConcept());
}
}
private Command createReconnectCommand(IDiagramModelConnection connection, IConnectable connectable, boolean isSourceCommand) {
ReconnectDiagramConnectionCommand cmd = new ReconnectDiagramConnectionCommand(connection);
if(isSourceCommand) {
cmd.setNewSource(connectable);
}
else {
cmd.setNewTarget(connectable);
}
return cmd;
}
@Override
public void eraseTargetFeedback(Request request) {
IFigure figure = ((GraphicalEditPart)getHost()).getFigure();
if(figure instanceof ITargetFeedbackFigure) {
((ITargetFeedbackFigure)figure).eraseTargetFeedback();
}
}
@Override
public void showTargetFeedback(Request request) {
if(request.getType().equals(RequestConstants.REQ_CONNECTION_START)) {
if(getConnectionCreateCommand((CreateConnectionRequest)request) != null) {
showTargetFeedback();
}
}
if(request.getType().equals(RequestConstants.REQ_CONNECTION_END)) {
if(getConnectionCompleteCommand((CreateConnectionRequest)request) != null) {
showTargetFeedback();
}
}
if(request.getType().equals(RequestConstants.REQ_RECONNECT_SOURCE)) {
if(getReconnectSourceCommand((ReconnectRequest)request) != null) {
showTargetFeedback();
}
}
if(request.getType().equals(RequestConstants.REQ_RECONNECT_TARGET)) {
if(getReconnectTargetCommand((ReconnectRequest)request) != null) {
showTargetFeedback();
}
}
}
private void showTargetFeedback() {
IFigure figure = ((GraphicalEditPart)getHost()).getFigure();
if(figure instanceof ITargetFeedbackFigure) {
((ITargetFeedbackFigure)figure).showTargetFeedback();
}
}
/**
* @return True if valid source for a connection type
*/
static boolean isValidConnectionSource(IConnectable source, EClass relationshipType) {
// Special case if relationshipType == null. Means that the Magic connector is being used
if(relationshipType == null) {
return true;
}
// This first: Diagram Connection from/to notes/groups/diagram refs
if(relationshipType == IArchimatePackage.eINSTANCE.getDiagramModelConnection()) {
return true;
}
// Archimate Concept source
if(source instanceof IDiagramModelArchimateComponent) {
IDiagramModelArchimateComponent dmc = (IDiagramModelArchimateComponent)source;
return ArchimateModelUtils.isValidRelationshipStart(dmc.getArchimateConcept(), relationshipType);
}
return false;
}
/**
* @param source
* @param target
* @param relationshipType
* @return True if valid connection source/target for connection type
*/
static boolean isValidConnection(IConnectable source, IConnectable target, EClass relationshipType) {
/*
* Diagram Connection from/to notes/groups/diagram refs.
* Allowed between notes, visual groups, diagram refs and ArchiMate components
*/
if(relationshipType == IArchimatePackage.eINSTANCE.getDiagramModelConnection()) {
// Not circular
// Edit - allowed for JB!
//if(source == target) {
//return false;
//}
// Notes
if(source instanceof IDiagramModelNote || target instanceof IDiagramModelNote) {
return true;
}
// Groups
if(source instanceof IDiagramModelGroup || target instanceof IDiagramModelGroup) {
// Edit - allowed for JB!
return true;
//return !(source instanceof IDiagramModelArchimateComponent) && !(target instanceof IDiagramModelArchimateComponent);
}
// Diagram Refs
if(source instanceof IDiagramModelReference || target instanceof IDiagramModelReference) {
// Edit - allowed for JB!
return true;
//return !(source instanceof IDiagramModelArchimateComponent) && !(target instanceof IDiagramModelArchimateComponent);
}
return false;
}
// Connection from Archimate concept to Archimate concept (but not from relation to relation)
if((source instanceof IDiagramModelArchimateComponent && target instanceof IDiagramModelArchimateComponent) &&
!(source instanceof IDiagramModelArchimateConnection && target instanceof IDiagramModelArchimateConnection)) {
// Special case if relationshipType == null. Means that the Magic connector is being used
if(relationshipType == null) {
return true;
}
IArchimateConcept sourceConcept = ((IDiagramModelArchimateComponent)source).getArchimateConcept();
IArchimateConcept targetConcept = ((IDiagramModelArchimateComponent)target).getArchimateConcept();
return ArchimateModelUtils.isValidRelationship(sourceConcept, targetConcept, relationshipType);
}
return false;
}
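/*
* Illustrative examples (assumptions added for orientation, not taken from the original source):
* - a plain DiagramModelConnection between a Note and an ArchiMate element is accepted,
* because connections to or from notes are always allowed above;
* - a connection between two ArchiMate elements with a concrete relationship type is
* delegated to ArchimateModelUtils.isValidRelationship(sourceConcept, targetConcept, type);
* - a relationship-to-relationship connection is rejected by the guard on the last branch.
*/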
}
|
package org.apereo.cas.util.serialization;
import lombok.experimental.UtilityClass;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apereo.cas.ticket.TicketGrantingTicket;
import org.apereo.cas.ticket.proxy.ProxyGrantingTicket;
import org.apereo.cas.util.InetAddressUtils;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This is {@link TicketIdSanitizationUtils} which attempts to mask
* sensitive ticket ids found in a given String.
*
* @author Misagh Moayyed
* @since 5.0.0
*/
@Slf4j
@UtilityClass
public class TicketIdSanitizationUtils {
private static final Pattern TICKET_ID_PATTERN = Pattern.compile("(?:(?:" + TicketGrantingTicket.PREFIX + "|"
+ ProxyGrantingTicket.PROXY_GRANTING_TICKET_IOU_PREFIX + "|" + ProxyGrantingTicket.PROXY_GRANTING_TICKET_PREFIX
+ ")-\\d+-)([\\w.-]+)");
/**
* Specifies the ending tail length of the ticket id that would still be visible in the output
* for troubleshooting purposes.
*/
private static final int VISIBLE_TAIL_LENGTH = 10;
/**
* Length of the host name suffix appended by the default ticket id generator, used so the
* proper visible length is shown.
*/
private static final int HOST_NAME_LENGTH = InetAddressUtils.getCasServerHostName().length();
/**
* Remove ticket id from the message.
*
* @param msg the message
* @return the modified message with tgt id removed
*/
public static String sanitize(final String msg) {
String modifiedMessage = msg;
if (StringUtils.isNotBlank(msg) && !Boolean.getBoolean("CAS_TICKET_ID_SANITIZE_SKIP")) {
final Matcher matcher = TICKET_ID_PATTERN.matcher(msg);
while (matcher.find()) {
final String match = matcher.group();
final int replaceLength = matcher.group(1).length()
- VISIBLE_TAIL_LENGTH
- (HOST_NAME_LENGTH + 1);
final String newId = match.replace(
matcher.group(1).substring(0,replaceLength),
StringUtils.repeat("*", replaceLength));
modifiedMessage = modifiedMessage.replaceAll(match, newId);
}
}
return modifiedMessage;
}
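/*
* Usage sketch (illustrative assumption, not part of the original class): shows the intended
* effect of sanitize() on a log message embedding a ticket-granting ticket id. The id below is
* made up; the exact masking depends on the configured ticket id generator and CAS host name.
*/
private static String sanitizeExample() {
// Hypothetical message containing a TGT id: prefix, counter, random body, host suffix.
final String raw = "Created ticket [TGT-1-abcdefghijklmnopqrstuvwxyz123456-cas.example.org]";
// The middle of the id is replaced with '*' characters; roughly the last ten characters
// and the host suffix stay visible for troubleshooting.
return sanitize(raw);
}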
}
|
import junit.extensions.TestSetup;
import junit.framework.Test;
import junit.framework.TestSuite;
import java.util.List;
import java.util.ArrayList;
import java.util.Enumeration;
import com.thoughtworks.selenium.Selenium;
import com.thoughtworks.selenium.DefaultSelenium;
/**
* Starts the Browser only once per Test Suite.
*/
public class XWikiSeleniumTestSetup extends TestSetup
{
private static final int SELENIUM_PORT = Integer.parseInt(System.getProperty("seleniumPort", "4444"));
private static final String PORT = System.getProperty("xwikiPort", "8080");
private static final String BASE_URL = "http://localhost:" + PORT;
private static final String BROWSER = System.getProperty("browser", "*firefox");
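/*
* Configuration sketch (assumption, shown for illustration only): the suite can be pointed at a
* different XWiki instance or browser through system properties, e.g.
* -DseleniumPort=4444 -DxwikiPort=8080 -Dbrowser="*iexplore"
* Properties left unset fall back to the defaults declared above.
*/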
private Selenium selenium;
public XWikiSeleniumTestSetup(Test test)
{
super(test);
}
protected void setUp() throws Exception
{
this.selenium = new DefaultSelenium("localhost", SELENIUM_PORT, BROWSER, BASE_URL) {
// TODO: Remove this when Selenium fixes the problem.
@Override public void open(String url)
{
commandProcessor.doCommand("open", new String[] {url, "true"});
}
};
// Sets the Selenium object in all tests
for (AbstractXWikiTestCase test: getTests(getTest())) {
test.setSelenium(this.selenium);
}
this.selenium.start();
}
protected void tearDown() throws Exception
{
this.selenium.stop();
}
private List<AbstractXWikiTestCase> getTests(Test test)
{
List<AbstractXWikiTestCase> tests = new ArrayList<AbstractXWikiTestCase>();
if (TestSuite.class.isAssignableFrom(test.getClass())) {
TestSuite suite = (TestSuite) test;
Enumeration nestedTests = suite.tests();
while (nestedTests.hasMoreElements()) {
tests.addAll(getTests((Test) nestedTests.nextElement()));
}
} else if (TestSetup.class.isAssignableFrom(test.getClass())) {
TestSetup setup = (TestSetup) test;
tests.addAll(getTests(setup.getTest()));
} else if (AbstractXWikiTestCase.class.isAssignableFrom(test.getClass())) {
tests.add((AbstractXWikiTestCase) test);
}
return tests;
}
}
|
package com.eaglesakura.android.framework.support.ui;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.KeyEvent;
import com.eaglesakura.android.framework.support.ui.playservice.GoogleApiClientToken;
import com.eaglesakura.android.framework.support.ui.playservice.GoogleApiTask;
import com.eaglesakura.android.util.ContextUtil;
import org.androidannotations.annotations.AfterViews;
import org.androidannotations.annotations.EActivity;
import org.androidannotations.annotations.InstanceState;
import org.androidannotations.annotations.UiThread;
import java.util.List;
@EActivity
public abstract class BaseActivity extends ActionBarActivity implements FragmentChooser.Callback {
protected UserNotificationController userNotificationController = new UserNotificationController(this);
protected GoogleApiClientToken googleApiClientToken;
protected boolean resumed;
protected BaseActivity() {
fragments.setCallback(this);
}
public void setGoogleApiClientToken(GoogleApiClientToken googleApiClientToken) {
if (this.googleApiClientToken != null) {
this.googleApiClientToken.unlock();
}
this.googleApiClientToken = googleApiClientToken;
if (this.googleApiClientToken != null) {
this.googleApiClientToken.lock();
}
}
public GoogleApiClientToken getGoogleApiClientToken() {
return googleApiClientToken;
}
@Override
protected void onStart() {
super.onStart();
if (googleApiClientToken != null) {
googleApiClientToken.startInitialConnect();
}
}
@Override
protected void onResume() {
super.onResume();
resumed = true;
}
@Override
protected void onPause() {
super.onPause();
resumed = false;
}
public boolean isResumed() {
return resumed;
}
@Override
protected void onStop() {
super.onStop();
if (googleApiClientToken != null) {
googleApiClientToken.unlock();
}
}
@AfterViews
protected void onAfterViews() {
}
protected void log(String fmt, Object... args) {
Log.i(((Object) this).getClass().getSimpleName(), String.format(fmt, args));
}
protected void logi(String fmt, Object... args) {
Log.i(((Object) this).getClass().getSimpleName(), String.format(fmt, args));
}
protected void logd(String fmt, Object... args) {
Log.d(((Object) this).getClass().getSimpleName(), String.format(fmt, args));
}
public UserNotificationController getUserNotificationController() {
return userNotificationController;
}
@UiThread
protected void toast(String fmt, Object... args) {
userNotificationController.toast(this, String.format(fmt, args));
}
/**
* show toast
*
* @param resId
*/
protected void toast(int resId) {
toast(getString(resId));
}
/**
* show progress dialog
*
* @param stringId
*/
public void pushProgress(int stringId) {
pushProgress(getString(stringId));
}
/**
* Returns whether a progress dialog is currently showing.
*
* @return true if a progress dialog is being displayed
*/
public boolean isProgressing() {
return userNotificationController.isProgressing();
}
/**
* Show a progress dialog with the given message.
*
* @param message the message to display
*/
@UiThread
public void pushProgress(String message) {
userNotificationController.pushProgress(this, message);
}
@UiThread
public void popProgress() {
userNotificationController.popProgress(this);
}
@InstanceState
protected FragmentChooser fragments = new FragmentChooser();
/**
* Called when a Fragment is attached to this Activity.
* <p/>
* Compacts the chooser and registers the fragment so it can be looked up later.
*
* @param fragment the attached fragment
*/
@Override
public void onAttachFragment(Fragment fragment) {
super.onAttachFragment(fragment);
fragments.compact();
fragments.addFragment(FragmentChooser.ReferenceType.Weak, fragment, fragment.getTag(), 0);
}
/**
* Dispatches a back key press to the currently visible fragments.
*
* @param event the key event
* @return true if a visible BaseFragment handled the back button
*/
protected boolean handleFragmentsKeyEvent(KeyEvent event) {
if (!ContextUtil.isBackKeyEvent(event)) {
return false;
}
List<Fragment> list = fragments.listExistFragments();
for (Fragment frag : list) {
if (frag.isVisible() && frag instanceof BaseFragment) {
if (((BaseFragment) frag).handleBackButton()) {
return true;
}
}
}
return false;
}
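/*
* Sketch (assumption, not part of the original source): a fragment consumes the back key by
* overriding BaseFragment.handleBackButton(), which this method calls on every visible fragment:
*
* @Override
* public boolean handleBackButton() {
* if (isEditing) { // hypothetical state flag
* cancelEditing(); // hypothetical helper
* return true; // consumed; dispatchKeyEvent() below will not call super
* }
* return false;
* }
*/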
@Override
public boolean dispatchKeyEvent(KeyEvent event) {
if (handleFragmentsKeyEvent(event)) {
return true;
}
return super.dispatchKeyEvent(event);
}
@Override
public FragmentManager getFragmentManager(FragmentChooser chooser) {
return getSupportFragmentManager();
}
@Override
public Fragment newFragment(FragmentChooser chooser, String requestTag) {
return null;
}
@Override
public boolean isFragmentExist(FragmentChooser chooser, Fragment fragment) {
if (fragment == null) {
return false;
}
if (fragment instanceof BaseFragment) {
if (((BaseFragment) fragment).isDestroyed()) {
return false;
}
}
return true;
}
/**
* Executes a task against the Google Api client.
* <p/>
* Delegates to the current GoogleApiClientToken.
*
* @param task the task to execute
* @return the result of the task
*/
public <T> T executeGoogleApi(final GoogleApiTask<T> task) {
return googleApiClientToken.executeGoogleApi(task);
}
}
|
package org.eclipse.birt.report.engine.data.dte;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import org.eclipse.birt.core.archive.IDocArchiveReader;
import org.eclipse.birt.core.archive.IDocArchiveWriter;
import org.eclipse.birt.core.exception.BirtException;
import org.eclipse.birt.core.script.ScriptContext;
import org.eclipse.birt.data.engine.api.DataEngineContext;
import org.eclipse.birt.data.engine.api.IBasePreparedQuery;
import org.eclipse.birt.data.engine.api.IBaseQueryResults;
import org.eclipse.birt.data.engine.api.IDataQueryDefinition;
import org.eclipse.birt.data.engine.api.IQueryDefinition;
import org.eclipse.birt.data.engine.api.IQueryResults;
import org.eclipse.birt.data.engine.api.querydefn.QueryDefinition;
import org.eclipse.birt.data.engine.olap.api.ICubeQueryResults;
import org.eclipse.birt.data.engine.olap.api.query.ICubeQueryDefinition;
import org.eclipse.birt.report.data.adapter.api.DataRequestSession;
import org.eclipse.birt.report.data.adapter.api.DataSessionContext;
import org.eclipse.birt.report.engine.api.EngineException;
import org.eclipse.birt.report.engine.api.impl.ReportDocumentConstants;
import org.eclipse.birt.report.engine.data.DataEngineFactory;
import org.eclipse.birt.report.engine.executor.EngineExtensionManager;
import org.eclipse.birt.report.engine.executor.ExecutionContext;
import org.eclipse.birt.report.engine.extension.IBaseResultSet;
import org.eclipse.birt.report.engine.extension.engine.IDataExtension;
import org.eclipse.birt.report.engine.ir.Report;
public class DataInteractiveEngine extends AbstractDataEngine
{
/**
* output stream used to save the resultset relations
*/
protected DataOutputStream dos;
protected List<String[]> newMetaInfo = new ArrayList<String[]>();
protected List<String[]> metaInfo = new ArrayList<String[]>( );
protected IBaseResultSet[] reportletResults;
protected IDocArchiveWriter writer;
protected IDocArchiveReader reader;
public DataInteractiveEngine( DataEngineFactory factory,
ExecutionContext context, IDocArchiveReader reader,
IDocArchiveWriter writer ) throws Exception
{
super( factory, context );
this.writer = writer;
this.reader = reader;
// create the DteData session.
DataSessionContext dteSessionContext = new DataSessionContext(
DataSessionContext.MODE_UPDATE, context.getDesign( ), context
.getScriptContext( ), context
.getApplicationClassLoader( ) );
dteSessionContext.setDocumentReader( reader );
dteSessionContext.setDocumentWriter( writer );
dteSessionContext.setAppContext( context.getAppContext( ) );
DataEngineContext dteEngineContext = dteSessionContext
.getDataEngineContext( );
dteEngineContext.setLocale( context.getLocale( ) );
dteEngineContext.setTimeZone( context.getTimeZone( ) );
String tempDir = getTempDir( context );
if ( tempDir != null )
{
dteEngineContext.setTmpdir( tempDir );
}
dteSession = DataRequestSession.newSession( dteSessionContext );
initialize();
}
protected void initialize() throws Exception
{
loadDteMetaInfo( reader );
if ( writer != null && dos == null )
{
dos = new DataOutputStream(
writer
.createRandomAccessStream( ReportDocumentConstants.DATA_SNAP_META_STREAM ) );
// dos = new DataOutputStream( writer.createRandomAccessStream(
// ReportDocumentConstants.DATA_META_STREAM ) );
DteMetaInfoIOUtil.startMetaInfo( dos );
}
}
protected void updateMetaInfo( )
{
for ( int i = 0; i < newMetaInfo.size( ); i++ )
{
String[] info = newMetaInfo.get( i );
String pRsetId = info[0];
String rawId = info[1];
String queryId = info[2];
String rsetId = info[3];
String rowId = info[4];
removeMetaInfo( pRsetId, queryId, rsetId );
}
for ( int i = 0; i < metaInfo.size( ); i++ )
{
String[] info = metaInfo.get( i );
storeDteMetaInfo( info[0], info[1], info[2], info[3], info[4] );
}
newMetaInfo.clear( );
metaInfo.clear( );
}
protected void removeMetaInfo(String parentId, String queryId, String rsetId)
{
Iterator<String[]> iter = metaInfo.iterator( );
while ( iter.hasNext( ) )
{
String[] info = iter.next( );
String pId = info[0];
String qId = info[2];
String rsId = info[3];
if ( queryId.equals( qId ) && equals( rsetId, rsId )
&& equals( parentId, pId ) )
{
iter.remove( );
}
}
}
protected boolean equals( String original, String destination )
{
if ( original == null )
{
return destination == null;
}
return original.equals( destination );
}
protected void removeMetaInfo( String parentId, String queryId )
{
ArrayList<String> rsets = new ArrayList<String>( );
Iterator<String[]> iter = metaInfo.iterator( );
while ( iter.hasNext( ) )
{
String[] info = iter.next( );
String pRsetId = info[0];
String qId = info[2];
String rsetId = info[3];
if ( queryId.equals( qId )
&& ( parentId == null && pRsetId == null || ( ( parentId != null ) && parentId
.equals( pRsetId ) ) ) )
{
iter.remove( );
rsets.add( rsetId );
}
}
while ( rsets.size( ) > 0 )
{
ArrayList<String> temp = new ArrayList<String>( );
for ( int i = 0; i < rsets.size( ); i++ )
{
temp.addAll( removeMetaInfo( rsets.get( i ) ) );
}
rsets = temp;
}
}
protected List<String> removeMetaInfo( String queryId )
{
ArrayList<String> rsets = new ArrayList<String>( );
Iterator<String[]> iter = metaInfo.iterator( );
while ( iter.hasNext( ) )
{
String[] info = iter.next( );
String pRsetId = info[0];
if ( queryId.equals( pRsetId ) )
{
iter.remove( );
rsets.add( info[3] );
}
}
return rsets;
}
/**
* Save the result set metadata into the stream.
*
* @param pRsetId the parent result set id
* @param rawId the raw id of the parent row
* @param queryId the query id
* @param rsetId the result set id
* @param rowId the row id
*/
private void storeDteMetaInfo( String pRsetId, String rawId,
String queryId, String rsetId, String rowId )
{
if ( dos != null )
{
try
{
// save the meta information
if ( context.isExecutingMasterPage( ) )
{
if ( pRsetId == null )
{
rawId = "-1";
}
}
DteMetaInfoIOUtil.storeMetaInfo( dos, pRsetId, rawId, queryId,
rsetId, rowId );
newMetaInfo.add( new String[]{pRsetId, rawId, queryId,
rsetId, rowId } );
}
catch ( IOException e )
{
logger.log( Level.SEVERE, e.getMessage( ) );
}
}
}
protected void loadDteMetaInfo( IDocArchiveReader reader ) throws IOException
{
metaInfo = DteMetaInfoIOUtil.loadDteMetaInfo( reader );
if ( metaInfo != null )
{
for ( int i = 0; i < metaInfo.size( ); i++ )
{
String[] rsetRelation = (String[]) metaInfo.get( i );
String pRsetId = rsetRelation[0];
String rowId = rsetRelation[1];
String queryId = rsetRelation[2];
String rsetId = rsetRelation[3];
addResultSetRelation( pRsetId, rowId, queryId, rsetId );
}
}
}
public String getResultIDByRowID( String pRsetId, String rawId,
String queryId )
{
// TODO: not supported
return null;
}
protected void doPrepareQuery( Report report, Map appContext )
{
this.appContext = appContext;
// prepare report queries
queryIDMap.putAll( report.getQueryIDs( ) );
}
/**
* For a report with following group/data structure
* <table border=solid>
* <tr><td>group NO.</td><td>raw id</td><td>row id</td><td>data</td><td>sub/nested result set</td></tr>
* <tr><td rowspan=2>1</td><td>0</td><td>0</td><td>1</td><td>QuRs1</td></tr>
* <tr><td>1</td><td>1</td><td>2</td><td></td></tr>
* <tr><td rowspan=2>2</td><td>2</td><td>2</td><td>3</td><td>QuRs2</td></tr>
* <tr><td>3</td><td>3</td><td>4</td><td></td></tr>
* </table>
* <br/>
* The indices for result sets are saved in ResultSetIndex as:
* <table border=solid>
* <tr><td>raw id</td><td>sub/nested result set</td></tr>
* <tr><td>0</td><td>QuRs0</td></tr>
* <tr><td>2</td><td>QuRs1</td></tr>
* </table>
* <br/>
* If data column is sorted as descending, the data structure changed to:
* <table border=solid>
* <tr><td>group NO.</td><td>raw id</td><td>row id</td><td>data</td><td>sub/nested result set</td></tr>
* <tr><td rowspan=2>1</td><td>1</td><td>0</td><td>2</td><td>QuRs1</td></tr>
* <tr><td>0</td><td>1</td><td>1</td><td></td></tr>
* <tr><td rowspan=2>2</td><td>3</td><td>2</td><td>4</td><td>QuRs2</td></tr>
* <tr><td>2</td><td>3</td><td>3</td><td></td></tr>
* </table>
* The result set indices should keep same as before change because the algorithm for result set searching uses
* raw id and the result set for the max row which is less than or equals to the searching id is returned.
* , i.e, if the raw id is 0, QuRs0 is returned, if raw id is 1, QuRs0(for 0) is returned.
* <br/>
* Following indices are incorrect because the result set for raw id 0 and 2 would be incorrect.
* <table border=solid>
* <tr><td>raw id</td><td>sub/nested result set</td></tr>
* <tr><td>1</td><td>QuRs0</td></tr>
* <tr><td>3</td><td>QuRs1</td></tr>
* </table>
*/
protected IBaseResultSet doExecuteQuery( IBaseResultSet parentResult,
IQueryDefinition query, Object queryOwner, boolean useCache ) throws BirtException
{
String queryID = (String) queryIDMap.get( query );
IBaseQueryResults parentQueryResults = null;
if ( parentResult != null )
{
parentQueryResults = parentResult.getQueryResults( );
}
String[] resultIdAndRawId = loadResultSetID( parentResult, queryID );
String resultSetID = null, originalRawId = "-1";
if ( resultIdAndRawId != null )
{
resultSetID = resultIdAndRawId[0];
originalRawId = resultIdAndRawId[1];
}
// in update mode, a result set id is not required
/*
if ( resultSetID == null )
{
throw new EngineException(MessageConstants.REPORT_QUERY_LOADING_ERROR , query.getClass( ).getName( ) );
}
*/
// Interactive mode does not support CUBE?
if ( !context.needRefreshData( ) )
{
( (QueryDefinition) query ).setQueryResultsID( resultSetID );
}
else
{
//should remove the original meta info
removeMetaInfo( parentQueryResults == null
? null
: parentQueryResults.getID( ), queryID, resultSetID );
}
// invoke the engine extension to process the queries
processQueryExtensions( query );
String pRsetId = null; // id of the parent query results
String rawId = "-1"; // raw id of the parent query results
String rowId = "-1";
IBaseQueryResults dteResults = null; // the dteResults of this query
QueryResultSet resultSet = null;
boolean needExecute = queryCache.needExecute( query, queryOwner, useCache );
if ( parentQueryResults == null )
{
// this is the root query
if ( !needExecute )
{
dteResults = getCachedQueryResult( query, parentResult );
}
if ( dteResults == null )
{
IBasePreparedQuery pQuery = dteSession.prepare( query );
dteResults = dteSession.execute( pQuery, null, context.getScriptContext( ) );
putCachedQueryResult( query, dteResults.getID( ) );
}
resultSet = new QueryResultSet( this, context,
query,
(IQueryResults) dteResults );
}
else
{
if ( parentResult instanceof QueryResultSet )
{
pRsetId = ( (QueryResultSet) parentResult ).getQueryResultsID( );
rowId = String.valueOf( ( (QueryResultSet) parentResult )
.getRowIndex( ) );
}
else
{
pRsetId = ( (CubeResultSet) parentResult ).getQueryResultsID( );
rowId = ( (CubeResultSet) parentResult ).getCellIndex( );
}
rawId = parentResult.getRawID( );
// this is the nest query, execute the query in the
// parent results
if ( !needExecute )
{
dteResults = getCachedQueryResult( query, parentResult );
}
if ( dteResults == null )
{
IBasePreparedQuery pQuery = dteSession.prepare( query );
dteResults = dteSession.execute( pQuery, parentQueryResults, context.getScriptContext( ) );
putCachedQueryResult( query, dteResults.getID( ) );
}
resultSet = new QueryResultSet( this, context, parentResult,
(IQueryDefinition) query,
(IQueryResults) dteResults );
}
// see DteResultSet
resultSet.setBaseRSetID( resultSetID );
storeDteMetaInfo( pRsetId, originalRawId, queryID, dteResults.getID( ), rowId );
return resultSet;
}
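/*
* Sketch of the result-set index lookup described in the javadoc above (assumption: the real
* ResultSetIndex implementation lives elsewhere; this only illustrates the "greatest raw id
* less than or equal to the requested id" rule with a TreeMap):
*
* TreeMap<Long, String> index = new TreeMap<Long, String>();
* index.put(0L, "QuRs0");
* index.put(2L, "QuRs1");
* Map.Entry<Long, String> hit = index.floorEntry(1L); // raw id 1 resolves to "QuRs0"
* String rsetId = hit == null ? null : hit.getValue();
*/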
protected void processQueryExtensions( IDataQueryDefinition query )
throws EngineException
{
String[] extensions = context.getEngineExtensions( );
if ( extensions != null )
{
EngineExtensionManager manager = context
.getEngineExtensionManager( );
for ( String extensionName : extensions )
{
IDataExtension extension = manager
.getDataExtension( extensionName );
if ( extension != null )
{
extension.prepareQuery( query );
}
}
}
}
protected IBaseResultSet doExecuteCube( IBaseResultSet parentResult,
ICubeQueryDefinition query, Object queryOwner, boolean useCache ) throws BirtException
{
String queryID = (String) queryIDMap.get( query );
IBaseQueryResults parentQueryResults = null;
if ( parentResult != null )
{
parentQueryResults = parentResult.getQueryResults( );
}
String[] resultIdAndRawId = loadResultSetID( parentResult, queryID );
String resultSetID = null, originalRawId = "-1";
if ( resultIdAndRawId != null )
{
resultSetID = resultIdAndRawId[0];
originalRawId = resultIdAndRawId[1];
}
// in update mode, a result set id is not required
/*
if ( resultSetID == null )
{
throw new EngineException(MessageConstants.REPORT_QUERY_LOADING_ERROR , queryID);
}
*/
if ( useCache )
{
String rsetId = String.valueOf( cachedQueryToResults.get( query ) );
query.setQueryResultsID( rsetId );
}
else
{
query.setQueryResultsID( null );
}
// Interactive mode does not support CUBE?
if(!context.needRefreshData( ))
{
query.setQueryResultsID( resultSetID );
}
else
{
query.setQueryResultsID( null );
//should remove the original meta info
removeMetaInfo( parentQueryResults == null
? null
: parentQueryResults.getID( ), queryID, resultSetID );
}
IBasePreparedQuery pQuery = dteSession.prepare( query, appContext );
String pRsetId = null; // id of the parent query results
String rawId = "-1"; // raw id of the parent query results
String rowId = "-1";
IBaseQueryResults dteResults; // the dteResults of this query
CubeResultSet resultSet = null;
ScriptContext scriptContext = context.getScriptContext( );
if ( parentQueryResults == null )
{
// this is the root query
dteResults = dteSession.execute( pQuery, null, scriptContext );
resultSet = new CubeResultSet( this, context, query,
(ICubeQueryResults) dteResults );
}
else
{
if ( parentResult instanceof QueryResultSet )
{
pRsetId = ( (QueryResultSet) parentResult ).getQueryResultsID( );
rowId = String.valueOf( ( (QueryResultSet) parentResult )
.getRowIndex( ) );
}
else
{
pRsetId = ( (CubeResultSet) parentResult ).getQueryResultsID( );
rowId = ( (CubeResultSet) parentResult ).getCellIndex( );
}
rawId = parentResult.getRawID( );
// this is the nest query, execute the query in the
// parent results
dteResults = dteSession.execute( pQuery, parentQueryResults,
scriptContext );
CubeResultSet cubeResultSet = new CubeResultSet( this,
context,
parentResult,
query,
(ICubeQueryResults) dteResults );
if ( cubeResultSet.getCubeCursor( ) == null )
{
resultSet = null;
}
else
{
resultSet = cubeResultSet;
}
}
// FIXME:
// resultSet.setBaseRSetID( resultSetID );
storeDteMetaInfo( pRsetId, originalRawId, queryID, dteResults.getID( ), rowId );
// persist the query results which need to be cached.
if ( query.cacheQueryResults( ) )
{
cachedQueryToResults.put( query, dteResults.getID( ) );
}
return resultSet;
}
private String[] loadResultSetID( IBaseResultSet parentResult, String queryID )
throws BirtException
{
String[] result = null;
if ( parentResult == null )
{
// if the query is used in master page, the row id is set as page
// number
if ( context.isExecutingMasterPage( ) )
{
result = getResultIDWithRawId( null, "-1", queryID );
if ( result == null )
{
long pageNumber = context.getPageNumber( );
result = getResultIDWithRawId( null, String
.valueOf( pageNumber ), queryID );
if ( result == null )
{
// try to find the query defined in page 1
result = getResultIDWithRawId( null, "1", queryID );
}
}
}
else
{
result = getResultIDWithRawId( null, "-1", queryID );
}
}
else
{
String pRsetId;
if ( parentResult instanceof QueryResultSet )
{
pRsetId = ( (QueryResultSet) parentResult )
.getQueryResultsID( );
}
else
{
pRsetId = ( (CubeResultSet) parentResult )
.getQueryResultsID( );
}
String rowid = parentResult.getRawID( );
result = getResultIDWithRawId( pRsetId, rowid, queryID );
}
return result;
}
public void shutdown( )
{
updateMetaInfo();
if ( null != dos )
{
try
{
dos.close( );
}
catch ( IOException e )
{
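// ignore failures while closing the stream during shutdown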
}
dos = null;
}
dteSession.shutdown( );
}
}
|
package es.tid.bdp.sftp.server.filesystem.hadoop;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.sshd.server.SshFile;
import org.apache.sshd.server.filesystem.NativeSshFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.hadoop.compression.lzo.LzopCodec;
import es.tid.bdp.utils.FileSystemControllerAbstract;
import es.tid.bdp.utils.data.BdpFileDescriptor;
import es.tid.bdp.utils.io.output.ProtoBufOutStream;
import es.tid.bdp.utils.parse.ParserAbstract;
/**
*
* @author rgc
*
*/
public class HdfsSshFile implements SshFile {
private final Logger LOG = LoggerFactory.getLogger(HdfsSshFile.class);
// the file name with respect to the user root.
// The path separator character will be '/' and
// it will always begin with '/'.
private String fileName;
private String userName;
private Path path;
private FileSystemControllerAbstract hfdsCtrl;
private FileSystem hdfs;
private OutputStream outputStream;
private BdpFileDescriptor descriptor;
public HdfsSshFile(final FileSystem hdfs,
final FileSystemControllerAbstract hfdsCtrl, final String userName,
final String fileName) {
if (hdfs == null) {
LOG.error("Hdfs can not be null");
throw new IllegalArgumentException("Hdfs can not be null");
}
if (fileName == null) {
LOG.error("fileName can not be null");
throw new IllegalArgumentException("fileName can not be null");
}
if (fileName.length() == 0) {
LOG.error("fileName can not be empty");
throw new IllegalArgumentException("fileName can not be empty");
} else if (fileName.charAt(0) != '/') {
LOG.error("fileName must be an absolute path");
throw new IllegalArgumentException(
"fileName must be an absolute path");
}
this.descriptor = hfdsCtrl.build(userName, fileName);
this.fileName = fileName;
this.userName = userName;
this.path = new Path(fileName);
this.hdfs = hdfs;
this.hfdsCtrl = hfdsCtrl;
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#getAbsolutePath()
*/
@Override
public String getAbsolutePath() {
// strip the last '/' if necessary
String fullName = fileName;
int filelen = fullName.length();
if ((filelen != 1) && (fullName.charAt(filelen - 1) == '/')) {
fullName = fullName.substring(0, filelen - 1);
}
return fullName;
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#getName()
*/
@Override
public String getName() {
// root - the short name will be '/'
if (fileName.equals("/")) {
return "/";
}
// strip the last '/'
String shortName = fileName;
int filelen = fileName.length();
if (shortName.charAt(filelen - 1) == '/') {
shortName = shortName.substring(0, filelen - 1);
}
// return from the last '/'
int slashIndex = shortName.lastIndexOf('/');
if (slashIndex != -1) {
shortName = shortName.substring(slashIndex + 1);
}
return shortName;
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#isDirectory()
*/
@Override
public boolean isDirectory() {
try {
return this.hdfs.getFileStatus(path).isDir();
} catch (IOException e) {
LOG.error("Error int he access to HDFS");
throw new RuntimeException(e);
}
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#isFile()
*/
@Override
public boolean isFile() {
try {
return !this.hdfs.getFileStatus(path).isDir();
} catch (IOException e) {
LOG.error("Error int he access to HDFS");
throw new RuntimeException(e);
}
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#doesExist()
*/
@Override
public boolean doesExist() {
try {
return hdfs.exists(path);
} catch (IOException e) {
LOG.error("Error int he access to HDFS");
throw new RuntimeException(e);
}
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#isReadable()
*/
@Override
public boolean isReadable() {
return descriptor.isReadable();
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#isWritable()
*/
@Override
public boolean isWritable() {
return descriptor.isWritable();
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#isExecutable()
*/
@Override
public boolean isExecutable() {
// there are no executable files in the HDFS file system
return false;
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#isRemovable()
*/
@Override
public boolean isRemovable() {
// TODO rgc: Not implemented
throw new RuntimeException("isExecutable is not implemented");
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#getParentFile()
*/
@Override
public SshFile getParentFile() {
int indexOfSlash = getAbsolutePath().lastIndexOf('/');
String parentFullName;
if (indexOfSlash == 0) {
parentFullName = "/";
} else {
parentFullName = getAbsolutePath().substring(0, indexOfSlash);
}
return new HdfsSshFile(hdfs, hfdsCtrl, userName, parentFullName);
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#getLastModified()
*/
@Override
public long getLastModified() {
try {
FileStatus fileStatus = hdfs.getFileStatus(path);
return fileStatus.getModificationTime();
} catch (Exception e) {
LOG.error("Error int he access to HDFS");
throw new RuntimeException(e);
}
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#setLastModified(long)
*/
@Override
public boolean setLastModified(long time) {
try {
hdfs.setTimes(path, time, time);
return true;
} catch (Exception e) {
return false;
}
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#getSize()
*/
@Override
public long getSize() {
try {
FileStatus fileStatus = hdfs.getFileStatus(path);
return fileStatus.getLen();
} catch (Exception e) {
LOG.error("Error int he access to HDFS");
throw new RuntimeException(e);
}
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#mkdir()
*/
@Override
public boolean mkdir() {
boolean retVal = false;
if (isWritable()) {
try {
retVal = hdfs.mkdirs(path);
} catch (IOException e) {
LOG.error("Error int he access to HDFS");
throw new RuntimeException(e);
}
}
return retVal;
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#delete()
*/
@Override
public boolean delete() {
try {
return hdfs.delete(path, true);
} catch (Exception e) {
return false;
}
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#create()
*/
@Override
public boolean create() throws IOException {
outputStream = hdfs.create(path);
return true;
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#truncate()
*/
@Override
public void truncate() throws IOException {
// TODO rgc: Not implemented
throw new RuntimeException("truncate is not implemented");
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#move(org.apache.sshd.server.SshFile)
*/
@Override
public boolean move(SshFile destination) {
// TODO rgc: Not implemented
throw new RuntimeException("move is not implemented");
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#listSshFiles()
*/
@Override
public List<SshFile> listSshFiles() {
// is a directory
if (!isDirectory()) {
return Collections.unmodifiableList(new ArrayList<SshFile>());
}
// directory - return all the files
FileStatus[] elements;
try {
elements = hdfs.listStatus(path);
} catch (IOException e) {
throw new RuntimeException(e);
}
if (elements == null) {
return Collections.unmodifiableList(new ArrayList<SshFile>());
}
// make sure the files are returned in order
Arrays.sort(elements, new Comparator<FileStatus>() {
public int compare(FileStatus f1, FileStatus f2) {
return f1.getPath().getName().compareTo(f2.getPath().getName());
}
});
// get the virtual name of the base directory
String virtualFileStr = getAbsolutePath();
if (virtualFileStr.charAt(virtualFileStr.length() - 1) != '/') {
virtualFileStr += '/';
}
// now return all the files under the directory
SshFile[] virtualFiles = new SshFile[elements.length];
for (int i = 0; i < elements.length; ++i) {
Path fileObj = elements[i].getPath();
String fileName = virtualFileStr + fileObj.getName();
virtualFiles[i] = new HdfsSshFile(hdfs, hfdsCtrl, userName,
fileName);
}
return Collections.unmodifiableList(Arrays.asList(virtualFiles));
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#createOutputStream(long)
*/
@Override
public OutputStream createOutputStream(long offset) throws IOException {
if (offset > 0) {
throw new RuntimeException("Writing at an offset is not supported");
}
if (this.outputStream == null) {
create();
}
if (this.descriptor.isCompressible()) {
LzopCodec codec = new LzopCodec();
codec.setConf(hdfs.getConf());
outputStream = codec.createOutputStream(outputStream);
}
ParserAbstract parser = descriptor.getParser();
if (parser != null) {
outputStream = new ProtoBufOutStream(outputStream, parser);
}
return outputStream;
}
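/*
* Write-path sketch (assumption, for orientation only): depending on the file descriptor the
* raw HDFS stream may be wrapped twice before being handed back to the SFTP layer:
*
* hdfs.create(path) -> raw HDFS output stream
* LzopCodec.createOutputStream(...) -> LZO-compressed stream, when descriptor.isCompressible()
* new ProtoBufOutStream(stream, parser) -> presumably serializes the uploaded data through the
* configured parser (assumption based on the class name), when a parser is provided
*/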
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#createInputStream(long)
*/
@Override
public InputStream createInputStream(long offset) throws IOException {
if (offset > 0) {
throw new RuntimeException("Reading from an offset is not supported");
}
return hdfs.open(path);
}
/*
* (non-Javadoc)
*
* @see org.apache.sshd.server.SshFile#handleClose()
*/
@Override
public void handleClose() throws IOException {
// TODO rgc
}
}
|
package mil.nga.giat.geowave.datastore.accumulo.cli;
import java.io.IOException;
import mil.nga.giat.geowave.core.cli.CLIOperationDriver;
import mil.nga.giat.geowave.core.index.ByteArrayId;
import mil.nga.giat.geowave.core.store.DataStore;
import mil.nga.giat.geowave.core.store.adapter.DataAdapter;
import mil.nga.giat.geowave.core.store.index.IndexStore;
import mil.nga.giat.geowave.datastore.accumulo.AccumuloCommandLineOptions;
import mil.nga.giat.geowave.datastore.accumulo.AccumuloDataStore;
import mil.nga.giat.geowave.datastore.accumulo.AccumuloOperations;
import mil.nga.giat.geowave.datastore.accumulo.AccumuloOptions;
import mil.nga.giat.geowave.datastore.accumulo.metadata.AccumuloAdapterStore;
import mil.nga.giat.geowave.datastore.accumulo.metadata.AccumuloDataStatisticsStore;
import mil.nga.giat.geowave.datastore.accumulo.metadata.AccumuloIndexStore;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.log4j.Logger;
/**
*
* Simple command line tool to recalculate statistics for an adapter.
*
*/
public abstract class StatsOperation implements
CLIOperationDriver
{
protected static final Logger LOGGER = Logger.getLogger(StatsOperation.class);
public boolean runOperation(
final AccumuloOperations accumuloOperations,
final ByteArrayId adapterId,
final String[] authorizations )
throws IOException {
final AccumuloOptions accumuloOptions = new AccumuloOptions();
accumuloOptions.setPersistDataStatistics(true);
final AccumuloDataStore dataStore = new AccumuloDataStore(
accumuloOperations,
accumuloOptions);
final AccumuloAdapterStore adapterStore = new AccumuloAdapterStore(
accumuloOperations);
final AccumuloIndexStore indexStore = new AccumuloIndexStore(
accumuloOperations);
final AccumuloDataStatisticsStore statsStore = new AccumuloDataStatisticsStore(
accumuloOperations);
DataAdapter<?> adapter = null;
if (adapterId != null) {
adapter = adapterStore.getAdapter(adapterId);
if (adapter == null) {
LOGGER.error("Unknown adapter " + adapterId);
return false;
}
}
return doWork(
statsStore,
dataStore,
indexStore,
adapter,
authorizations);
}
public abstract boolean doWork(
AccumuloDataStatisticsStore statsStore,
DataStore dataStore,
IndexStore indexStore,
DataAdapter<?> adapter,
String[] authorizations );
private static String[] getAuthorizations(
final String auths ) {
if ((auths == null) || (auths.length() == 0)) {
return new String[0];
}
final String[] authsArray = auths.split(",");
for (int i = 0; i < authsArray.length; i++) {
authsArray[i] = authsArray[i].trim();
}
return authsArray;
}
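/*
* Example (illustrative): getAuthorizations("public, secret ,internal") yields
* {"public", "secret", "internal"}; a null or empty argument yields an empty array.
*/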
/** Is an adapter type/id required? */
protected boolean isTypeRequired() {
return false;
}
@Override
public void run(
final String[] args )
throws ParseException {
try {
final Options allOptions = new Options();
AccumuloCommandLineOptions.applyOptions(allOptions);
StatsCommandLineOptions.applyOptions(
allOptions,
isTypeRequired());
final BasicParser parser = new BasicParser();
try {
final CommandLine commandLine = parser.parse(
allOptions,
args);
AccumuloCommandLineOptions accumuloOperations;
accumuloOperations = AccumuloCommandLineOptions.parseOptions(commandLine);
final StatsCommandLineOptions statsOperations = StatsCommandLineOptions.parseOptions(commandLine);
runOperation(
accumuloOperations.getAccumuloOperations(),
statsOperations.getTypeName() != null ? new ByteArrayId(
statsOperations.getTypeName()) : null,
getAuthorizations(statsOperations.getAuthorizations()));
}
catch (final ParseException e) {
LOGGER.error(
"Unable to parse stats tool arguments",
e);
}
}
catch (AccumuloException | AccumuloSecurityException | IOException e) {
LOGGER.error(
"Error while calculating statistics.",
e);
}
}
}
|
package com.dtflys.forest.backend.httpclient.response;
import com.dtflys.forest.exceptions.ForestRuntimeException;
import com.dtflys.forest.http.ForestRequest;
import com.dtflys.forest.http.ForestResponse;
import com.dtflys.forest.http.ForestResponseFactory;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.ProtocolVersion;
import org.apache.http.message.BasicHttpResponse;
import org.apache.http.message.BasicStatusLine;
import java.io.IOException;
import java.io.InputStream;
/**
* @author gongjun[jun.gong@thebeastshop.com]
* @since 2017-05-12 17:07
*/
public class HttpclientForestResponseFactory implements ForestResponseFactory<HttpResponse> {
private String responseContent;
private volatile ForestResponse resultResponse;
private String getStringContent(String encode, HttpEntity entity) throws IOException {
if (responseContent == null) {
InputStream inputStream = entity.getContent();
responseContent = IOUtils.toString(inputStream, encode);
}
return responseContent;
}
@Override
public synchronized ForestResponse createResponse(ForestRequest request, HttpResponse httpResponse) {
if (resultResponse != null) {
return resultResponse;
}
if (httpResponse == null) {
httpResponse = new BasicHttpResponse(
new BasicStatusLine(
new ProtocolVersion("1.1", 1, 1), 404, ""));
}
HttpclientForestResponse response = new HttpclientForestResponse(request, httpResponse);
// int statusCode = httpResponse.getStatusLine().getStatusCode();
// response.setStatusCode(statusCode);
// httpResponse.getAllHeaders();
// HttpEntity entity = response.getHttpResponse().getEntity();
// if (entity != null) {
// try {
// String responseText = getStringContent(request.getResponseEncode(), entity);
// response.setContent(responseText);
// } catch (IOException e) {
// throw new ForestRuntimeException(e);
// }
// }
this.resultResponse = response;
return response;
}
}
|
package org.eclipse.persistence.internal.oxm;
import java.util.Iterator;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
import javax.xml.namespace.QName;
import org.eclipse.persistence.descriptors.ClassDescriptor;
import org.eclipse.persistence.descriptors.InheritancePolicy;
import org.eclipse.persistence.exceptions.DescriptorException;
import org.eclipse.persistence.internal.helper.DatabaseField;
import org.eclipse.persistence.internal.helper.DatabaseTable;
import org.eclipse.persistence.internal.helper.Helper;
import org.eclipse.persistence.internal.sessions.AbstractRecord;
import org.eclipse.persistence.internal.sessions.AbstractSession;
import org.eclipse.persistence.oxm.NamespaceResolver;
import org.eclipse.persistence.oxm.XMLConstants;
import org.eclipse.persistence.oxm.XMLField;
import org.eclipse.persistence.oxm.record.XMLRecord;
/**
* INTERNAL:
* <p><b>Purpose</b>: A Subclass of Inheritance Policy to be used with XML
* Descriptors. If the class indicator field is an xsi:type, the value of that
* field may be a qualified type name. For example xsi:type="myns:my-type-name".
* Since any given XML document can use different prefixes for these namespaces,
* we must be able to find the class based on QName instead of just the string
* "myns:my-type-name".</p>
* @author mmacivor
* @since 10.1.3
*/
public class QNameInheritancePolicy extends InheritancePolicy {
//used for initialization. Prefixed type names will be changed to QNames.
private NamespaceResolver namespaceResolver;
private boolean usesXsiType = false;
public QNameInheritancePolicy() {
super();
}
public QNameInheritancePolicy(ClassDescriptor desc) {
super(desc);
}
/**
* Override to control order of uniqueTables, child tablenames should be first since
* getDefaultRootElement on an XMLDescriptor will return the first table.
*/
protected void updateTables(){
// Unique is required because the builder can add the same table many times.
Vector<DatabaseTable> childTables = getDescriptor().getTables();
Vector<DatabaseTable> parentTables = getParentDescriptor().getTables();
Vector<DatabaseTable> uniqueTables = Helper.concatenateUniqueVectors(childTables, parentTables);
getDescriptor().setTables(uniqueTables);
// After filtering out any duplicate tables, set the default table
// if one is not already set. This must be done now before any other
// initialization occurs. In a joined strategy case, the default
// table will be at an index greater than 0. Which is where
// setDefaultTable() assumes it is. Therefore, we need to send the
// actual default table instead.
if (childTables.isEmpty()) {
getDescriptor().setInternalDefaultTable();
} else {
getDescriptor().setInternalDefaultTable(uniqueTables.get(uniqueTables.indexOf(childTables.get(0))));
}
}
/**
* INTERNAL:
* Initialized the inheritance properties of the descriptor once the mappings are initialized.
* This is done before formal postInitialize during the end of mapping initialize.
*/
public void initialize(AbstractSession session) {
super.initialize(session);
// If we have a namespace resolver, check any of the class-indicator values
// for prefixed type names and resolve the namespaces.
if (!this.shouldUseClassNameAsIndicator()){
// Must first clone the map to avoid concurrent modification.
Iterator<Map.Entry> entries = new HashMap(getClassIndicatorMapping()).entrySet().iterator();
while (entries.hasNext()) {
Map.Entry entry = entries.next();
Object key = entry.getKey();
XPathFragment frag = ((XMLField) getClassIndicatorField()).getXPathFragment();
if (frag.getLocalName().equals(XMLConstants.SCHEMA_TYPE_ATTRIBUTE) && frag.getNamespaceURI() != null && frag.getNamespaceURI().equals(XMLConstants.SCHEMA_INSTANCE_URL)) {
usesXsiType = true;
}
if (key instanceof String) {
XPathQName qname;
String indicatorValue = (String) key;
if (!usesXsiType || namespaceResolver == null) {
qname = new XPathQName(indicatorValue, true);
} else {
int index = indicatorValue.indexOf(XMLConstants.COLON);
if (index != -1 && namespaceResolver != null) {
String prefix = indicatorValue.substring(0, index);
String localPart = indicatorValue.substring(index + 1);
String uri = namespaceResolver.resolveNamespacePrefix(prefix);
qname = new XPathQName(uri, localPart, true);
} else {
qname = new XPathQName(namespaceResolver.getDefaultNamespaceURI(), indicatorValue, true);
}
}
getClassIndicatorMapping().put(qname, entry.getValue());
} else if (key instanceof QName) {
XPathQName xpathQName = new XPathQName((QName) key, true);
getClassIndicatorMapping().put(xpathQName, entry.getValue());
}
}
}
//bug 6012173 - changed to initialize namespace uri on indicator field
//need to be able to compare uri and local name during marshal to see if field is xsi type field
if(getClassIndicatorField() != null){
XMLField classIndicatorXMLField;
try {
classIndicatorXMLField = (XMLField)getClassIndicatorField();
} catch (ClassCastException ex) {
classIndicatorXMLField = new XMLField(getClassIndicatorField().getName());
setClassIndicatorField(classIndicatorXMLField);
}
XPathFragment frag = classIndicatorXMLField.getLastXPathFragment();
if ((frag != null) && frag.hasNamespace() && frag.getPrefix() !=null && (namespaceResolver != null)) {
String uri = namespaceResolver.resolveNamespacePrefix(frag.getPrefix());
classIndicatorXMLField.getLastXPathFragment().setNamespaceURI(uri);
}
}
}
/**
* INTERNAL:
* This method is invoked only for the abstract descriptors.
*/
public Class classFromRow(AbstractRecord rowFromDatabase, AbstractSession session) throws DescriptorException {
((XMLRecord) rowFromDatabase).setSession(session);
boolean namespaceAware = ((XMLRecord) rowFromDatabase).isNamespaceAware();
if (hasClassExtractor() || shouldUseClassNameAsIndicator()) {
return super.classFromRow(rowFromDatabase, session);
}
Object indicator = rowFromDatabase.get(getClassIndicatorField());
if (indicator == AbstractRecord.noEntry) {
return null;
}
if (indicator == null) {
return null;
}
Class concreteClass;
if (indicator instanceof String) {
String indicatorValue = (String)indicator;
int index = indicatorValue.indexOf(((XMLRecord)rowFromDatabase).getNamespaceSeparator());
if (index == -1) {
if (namespaceAware && usesXsiType) {
String uri = ((XMLRecord)rowFromDatabase).resolveNamespacePrefix(null);
if (uri == null) {
concreteClass = (Class)this.classIndicatorMapping.get(new XPathQName(((XMLRecord)rowFromDatabase).getNamespaceResolver().getDefaultNamespaceURI() ,indicatorValue, namespaceAware));
} else {
XPathQName qname = new XPathQName(uri, indicatorValue, namespaceAware);
concreteClass = (Class)this.classIndicatorMapping.get(qname);
}
} else {
XPathQName qname = new XPathQName(indicatorValue, namespaceAware);
concreteClass = (Class)this.classIndicatorMapping.get(qname);
}
} else {
String prefix = indicatorValue.substring(0, index);
String localPart = indicatorValue.substring(index + 1);
String uri = ((XMLRecord)rowFromDatabase).resolveNamespacePrefix(prefix);
if (uri != null) {
XPathQName qname = new XPathQName(uri, localPart, namespaceAware);
concreteClass = (Class)this.classIndicatorMapping.get(qname);
} else {
concreteClass = (Class)this.classIndicatorMapping.get(indicatorValue);
}
}
} else {
concreteClass = (Class)this.classIndicatorMapping.get(indicator);
}
if (concreteClass == null) {
throw DescriptorException.missingClassForIndicatorFieldValue(indicator, getDescriptor());
}
return concreteClass;
}
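/*
* Example (illustrative assumption): for a document element such as
* <customer xmlns:myns="http://example.com/types" xsi:type="myns:my-type-name"/>
* the indicator value "myns:my-type-name" is split on the namespace separator, the prefix
* "myns" is resolved through the record's namespace resolver, and the class registered under
* XPathQName("http://example.com/types", "my-type-name") is returned.
*/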
public void setNamespaceResolver(NamespaceResolver resolver) {
this.namespaceResolver = resolver;
}
/**
* PUBLIC:
* To set the class indicator field name.
* This is the name of the field in the table that stores what type of object this is.
*/
public void setClassIndicatorFieldName(String fieldName) {
if (fieldName == null) {
setClassIndicatorField(null);
} else {
setClassIndicatorField(new XMLField(fieldName));
}
}
/**
* INTERNAL:
* Add abstract class indicator information to the database row. This is
* required when building a row for an insert or an update of a concrete child
* descriptor.
*/
public void addClassIndicatorFieldToRow(AbstractRecord databaseRow) {
if (hasClassExtractor()) {
return;
}
DatabaseField field = getClassIndicatorField();
Object value = getClassIndicatorValue();
boolean namespaceAware = ((XMLRecord)databaseRow).isNamespaceAware() || ((XMLRecord)databaseRow).hasCustomNamespaceMapper();
if(!namespaceAware && value instanceof String){
int colonIndex = ((String)value).indexOf(XMLConstants.COLON);
if(colonIndex > -1){
value = ((String)value).substring(colonIndex + 1);
}
}else if(namespaceAware && value instanceof String){
if(((XMLRecord)databaseRow).getNamespaceSeparator() != XMLConstants.COLON){
value= ((String)value).replace(XMLConstants.COLON, ((XMLRecord)databaseRow).getNamespaceSeparator());
}
}
databaseRow.put(field, value);
}
}
|
package org.eclipse.persistence.internal.oxm.record.json;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import org.eclipse.persistence.internal.libraries.antlr.runtime.ANTLRInputStream;
import org.eclipse.persistence.internal.libraries.antlr.runtime.ANTLRReaderStream;
import org.eclipse.persistence.internal.libraries.antlr.runtime.CharStream;
import org.eclipse.persistence.internal.libraries.antlr.runtime.RecognitionException;
import org.eclipse.persistence.internal.libraries.antlr.runtime.TokenRewriteStream;
import org.eclipse.persistence.internal.libraries.antlr.runtime.tree.CommonTree;
import org.eclipse.persistence.internal.libraries.antlr.runtime.tree.Tree;
import org.eclipse.persistence.internal.oxm.record.XMLReaderAdapter;
import org.eclipse.persistence.oxm.XMLConstants;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
public class JSONReader extends XMLReaderAdapter {
private static final String TRUE = "true";
private static final String FALSE = "false";
private Properties properties;
String attributePrefix = null;
public JSONReader(Properties props){
if(props != null){
attributePrefix = props.getProperty("json.attribute.prefix");
if(attributePrefix == ""){
attributePrefix = null;
}
}
}
private JSONAttributes attributes = new JSONAttributes();
@Override
public void parse(InputSource input) throws IOException, SAXException {
try {
CharStream charStream;
InputStream inputStream = null;
if(null != input.getByteStream()) {
charStream = new ANTLRInputStream(input.getByteStream());
} else if (null != input.getCharacterStream()){
charStream = new ANTLRReaderStream(input.getCharacterStream());
} else {
URL url = new URL(input.getSystemId());
inputStream = url.openStream();
charStream = new ANTLRInputStream(inputStream);
}
JSONLexer lexer = new JSONLexer(charStream);
TokenRewriteStream tokens = new TokenRewriteStream(lexer);
JSONParser parser = new JSONParser(tokens);
CommonTree commonTree = (CommonTree) parser.object().getTree();
contentHandler.startDocument();
parseRoot(commonTree);
contentHandler.endDocument();
if(null != inputStream) {
inputStream.close();
}
} catch(RecognitionException e) {
throw new SAXParseException(e.getLocalizedMessage(), input.getPublicId(), input.getSystemId(), e.line, e.index, e);
}
}
private void parseRoot(Tree tree) throws SAXException {
if(tree.getType() == JSONLexer.OBJECT){
int children = tree.getChildCount();
if(children == 1){
parse((CommonTree) tree.getChild(0));
}else{
contentHandler.startElement("", "", null, attributes.setTree(tree, attributePrefix));
for(int x=0, size=tree.getChildCount(); x<size; x++) {
parse((CommonTree) tree.getChild(x));
}
contentHandler.endElement("","", null);
}
}
}
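/**
 * Emits SAX events for one parse-tree node: PAIR nodes become elements (unless the key carries
 * the attribute prefix), ARRAY nodes repeat the parent element once per entry, and primitive
 * nodes are reported as character data.
 */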
private void parse(Tree tree) throws SAXException {
switch(tree.getType()) {
case JSONLexer.PAIR: {
Tree valueTree = tree.getChild(1);
if(valueTree.getType() == JSONLexer.ARRAY) {
parse(valueTree);
} else {
Tree stringTree = tree.getChild(0);
String localName = stringTree.getText().substring(1, stringTree.getText().length() - 1);
if(attributePrefix != null && localName.startsWith(attributePrefix)){
break;
}else{
contentHandler.startElement("", localName, localName, attributes.setTree(valueTree, attributePrefix));
}
parse(valueTree);
contentHandler.endElement("", localName, localName);
}
break;
}
case JSONLexer.STRING: {
String string = string(tree.getChild(0).getText());
contentHandler.characters(string);
break;
}
case JSONLexer.NUMBER: {
contentHandler.characters(tree.getChild(0).getText());
break;
}
case JSONLexer.TRUE: {
contentHandler.characters(TRUE);
break;
}
case JSONLexer.FALSE: {
contentHandler.characters(FALSE);
break;
}
case JSONLexer.NULL: {
break;
}
case JSONLexer.ARRAY: {
Tree parentStringTree = tree.getParent().getChild(0);
String parentLocalName = parentStringTree.getText().substring(1, parentStringTree.getText().length() - 1);
for(int x=0, size=tree.getChildCount(); x<size; x++) {
CommonTree nextChildTree = (CommonTree) tree.getChild(x);
contentHandler.startElement("", parentLocalName, parentLocalName, attributes.setTree(nextChildTree, attributePrefix));
parse(nextChildTree);
contentHandler.endElement("", parentLocalName, parentLocalName);
}
break;
}
default: {
for(int x=0, size=tree.getChildCount(); x<size; x++) {
parse((CommonTree) tree.getChild(x));
}
}
}
}
private String string(String string) {
string = string.substring(1, string.length() - 1);
string = string.replace("\\\"", "\"");
return string;
}
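/**
 * Exposes the members of a JSON object node as SAX attributes: a NULL node yields a single
 * xsi:nil="true" attribute, while an OBJECT node reports its pairs with primitive values,
 * honouring the configured attribute prefix (which is stripped from the reported name).
 */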
private static class JSONAttributes extends IndexedAttributeList {
private Tree tree;
private String attributePrefix;
public JSONAttributes setTree(Tree tree, String attributePrefix) {
reset();
this.tree = tree;
this.attributePrefix = attributePrefix;
return this;
}
@Override
protected List<Attribute> attributes() {
if(null == attributes) {
if(tree.getType() == JSONLexer.NULL){
attributes = new ArrayList<Attribute>(1);
attributes.add(new Attribute(XMLConstants.SCHEMA_INSTANCE_URL, XMLConstants.SCHEMA_NIL_ATTRIBUTE, XMLConstants.SCHEMA_NIL_ATTRIBUTE, "true"));
return attributes;
}
if(tree.getType() == JSONLexer.OBJECT) {
attributes = new ArrayList<Attribute>(tree.getChildCount());
for(int x=0; x<tree.getChildCount(); x++) {
Tree childTree = tree.getChild(x);
String attributeLocalName = childTree.getChild(0).getText().substring(1, childTree.getChild(0).getText().length() - 1);
if(attributePrefix != null){
if(attributeLocalName.startsWith(attributePrefix)){
attributeLocalName = attributeLocalName.substring(attributePrefix.length());
}else{
break;
}
}
Tree childValueTree = childTree.getChild(1);
switch(childValueTree.getType()) {
case JSONLexer.STRING: {
String stringValue = childValueTree.getChild(0).getText();
attributes.add(new Attribute("", attributeLocalName, attributeLocalName, stringValue.substring(1, stringValue.length() - 1)));
break;
}
case JSONLexer.NUMBER: {
attributes.add(new Attribute("", attributeLocalName, attributeLocalName, childValueTree.getChild(0).getText()));
break;
}
case JSONLexer.TRUE: {
attributes.add(new Attribute("", attributeLocalName, attributeLocalName, TRUE));
break;
}
case JSONLexer.FALSE: {
attributes.add(new Attribute("", attributeLocalName, attributeLocalName, FALSE));
break;
}
case JSONLexer.NULL: {
attributes.add(new Attribute("", attributeLocalName, attributeLocalName, ""));
break;
}
}
}
} else {
attributes = Collections.emptyList();
}
}
return attributes;
}
}
}
|
package org.systemsbiology.addama.appengine.rest;
import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalMemcacheServiceTestConfig;
import com.google.appengine.tools.development.testing.LocalServiceTestHelper;
import com.google.appengine.tools.development.testing.LocalUserServiceTestConfig;
import org.json.JSONArray;
import org.json.JSONObject;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.web.servlet.ModelAndView;
import org.systemsbiology.addama.commons.web.exceptions.ForbiddenAccessException;
import java.util.HashSet;
import static org.junit.Assert.*;
import static org.systemsbiology.addama.appengine.util.Greenlist.isGreenlistActive;
import static org.systemsbiology.addama.appengine.util.Greenlist.isGreenlisted;
/**
* @author hrovira
*/
public class GreenlistControllerTest {
private LocalServiceTestHelper helper;
private GreenlistController CONTROLLER;
@Before
public void setUp() throws Exception {
helper = new LocalServiceTestHelper(new LocalUserServiceTestConfig(),
new LocalMemcacheServiceTestConfig(), new LocalDatastoreServiceTestConfig());
helper.setEnvEmail("admin@addama.org");
helper.setEnvIsLoggedIn(true);
helper.setEnvAuthDomain("addama.org");
helper.setUp();
CONTROLLER = new GreenlistController();
}
@After
public void tearDown() throws Exception {
if (helper != null) {
helper.tearDown();
}
}
@Test
public void noGreenlist() throws Exception {
helper.setEnvIsAdmin(true);
ModelAndView mav = CONTROLLER.list(new MockHttpServletRequest());
assertNotNull(mav);
JSONObject json = (JSONObject) mav.getModel().get("json");
assertNotNull(json);
assertFalse(json.has("numberOfItems"));
assertFalse(json.has("items"));
}
@Test
public void addUsers() throws Exception {
helper.setEnvIsAdmin(true);
for (int i = 0; i < 10; i++) {
String user = i + "@addama.org";
CONTROLLER.addUser(new MockHttpServletRequest("POST", "/addama/greenlist/" + user));
}
assertTrue(isGreenlistActive());
for (int i = 0; i < 10; i++) {
assertTrue(isGreenlisted(i + "@addama.org"));
}
assertFalse(isGreenlisted("33@addama.org"));
ModelAndView mav = CONTROLLER.list(new MockHttpServletRequest());
assertNotNull(mav);
JSONObject json = (JSONObject) mav.getModel().get("json");
assertNotNull(json);
assertTrue(json.has("items"));
JSONArray items = json.getJSONArray("items");
assertEquals(10, items.length());
HashSet<String> foundUsers = new HashSet<String>();
for (int i = 0; i < items.length(); i++) {
JSONObject item = items.getJSONObject(i);
foundUsers.add(item.getString("id"));
}
assertEquals(10, foundUsers.size());
for (int i = 0; i < 10; i++) {
assertTrue(foundUsers.contains(i + "@addama.org"));
}
}
@Test(expected = ForbiddenAccessException.class)
public void notAdmin_list() throws Exception {
helper.setEnvIsAdmin(false);
CONTROLLER.list(new MockHttpServletRequest());
}
@Test(expected = ForbiddenAccessException.class)
public void notAdmin_addUsers() throws Exception {
helper.setEnvIsAdmin(false);
// exercise the add path rather than list so this test actually covers addUser
CONTROLLER.addUser(new MockHttpServletRequest("POST", "/addama/greenlist/user@addama.org"));
}
}
|
package org.phenotips.variantStoreIntegration.internal;
import org.phenotips.variantStoreIntegration.VariantStoreService;
import org.phenotips.variantstore.VariantStore;
import org.phenotips.variantstore.db.solr.SolrController;
import org.phenotips.variantstore.input.tsv.ExomiserTSVManager;
import org.phenotips.variantstore.shared.VariantStoreException;
import org.xwiki.component.annotation.Component;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.environment.Environment;
import java.nio.file.Paths;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
* @version $Id$
*/
@Component
@Singleton
public class DefaultVariantStoreService extends AbstractVariantStoreProxy implements Initializable, VariantStoreService
{
@Inject
private Environment env;
@Override
public void initialize() throws InitializationException {
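// Back the store with the Exomiser TSV input manager and an embedded Solr controller,
// keeping its data under <permanent-directory>/variant-store.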
this.variantStore = new VariantStore(
new ExomiserTSVManager(),
new SolrController()
);
try {
this.variantStore.init(Paths.get(this.env.getPermanentDirectory().getPath()).resolve("variant-store"));
} catch (VariantStoreException e) {
throw new InitializationException("Error setting up Variant Store", e);
}
}
}
|
package org.jtalks.jcommune.web.controller;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.time.DateFormatUtils;
import org.jtalks.jcommune.model.entity.JCUser;
import org.jtalks.jcommune.service.UserService;
import org.jtalks.jcommune.service.exceptions.ImageFormatException;
import org.jtalks.jcommune.service.exceptions.ImageProcessException;
import org.jtalks.jcommune.service.exceptions.ImageSizeException;
import org.jtalks.jcommune.service.exceptions.NotFoundException;
import org.jtalks.jcommune.service.nontransactional.AvatarService;
import org.jtalks.jcommune.service.nontransactional.ImageUtils;
import org.jtalks.jcommune.web.dto.OperationResultDto;
import org.jtalks.jcommune.web.util.JSONUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.MessageSource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;
/**
* Controller for processing avatar-related requests.
* todo: this class is too complex, we need to move some logic either to service or to a helper bean
*
* @author Alexandre Teterin
* @author Anuar Nurmakanov
*/
@Controller
public class AvatarController {
public static final String RESULT = "success";
static final String WRONG_FORMAT_RESOURCE_MESSAGE = "image.wrong.format";
static final String WRONG_SIZE_RESOURCE_MESSAGE = "image.wrong.size";
static final String COMMON_ERROR_RESOURCE_MESSAGE = "avatar.500.common.error";
public static final String HTTP_HEADER_DATETIME_PATTERN = "E, dd MMM yyyy HH:mm:ss z";
private static final String IF_MODIFIED_SINCE_HEADER = "If-Modified-Since";
private AvatarService avatarService;
private UserService userService;
private MessageSource messageSource;
private JSONUtils jsonUtils;
/**
* Constructor for controller instantiating, dependencies injected via autowiring.
*
* @param avatarService for avatar manipulation
* @param userService to manipulate user-related data
* @param messageSource to resolve locale-dependent messages
* @param jsonUtils to convert data to JSON format
*/
@Autowired
public AvatarController(
AvatarService avatarService,
UserService userService,
MessageSource messageSource,
JSONUtils jsonUtils) {
this.avatarService = avatarService;
this.userService = userService;
this.messageSource = messageSource;
this.jsonUtils = jsonUtils;
}
/**
* Process avatar file from request and return avatar preview in response.
* Used for IE, Opera specific request processing.
*
* @param file file, that contains uploaded image
* @return ResponseEntity
* @throws IOException defined in the JsonFactory implementation,
* caller must implement exception processing
* @throws ImageProcessException if error occurred while image processing
*/
@RequestMapping(value = "/users/IFrameAvatarpreview", method = RequestMethod.POST)
@ResponseBody
public ResponseEntity<String> uploadAvatar(
@RequestParam(value = "qqfile") MultipartFile file) throws IOException, ImageProcessException {
HttpHeaders responseHeaders = new HttpHeaders();
responseHeaders.setContentType(MediaType.TEXT_HTML);
Map<String, String> responseContent = new HashMap<String, String>();
return prepareResponse(file, responseHeaders, responseContent);
}
/**
* Process avatar file from request and return avatar preview in response.
* Used for FF, Chrome specific request processing
*
* @param bytes input avatar data
* @param response servlet response
* @return response content
* @throws ImageProcessException if error occurred while image processing
*/
@RequestMapping(value = "/users/XHRavatarpreview", method = RequestMethod.POST)
@ResponseBody
public Map<String, String> uploadAvatar(@RequestBody byte[] bytes,
HttpServletResponse response) throws ImageProcessException {
Map<String, String> responseContent = new HashMap<String, String>();
prepareResponse(bytes, response, responseContent);
return responseContent;
}
/**
* Write user avatar in response for rendering it on html pages.
*
* @param request servlet request
* @param response servlet response
* @param id user database identifier
* @throws NotFoundException if user with given encodedUsername not found
* @throws IOException throws if an output exception occurred
*/
@RequestMapping(value = "/users/{id}/avatar", method = RequestMethod.GET)
public void renderAvatar(
HttpServletRequest request,
HttpServletResponse response,
@PathVariable Long id)
throws NotFoundException, IOException {
JCUser user = userService.get(id);
Date ifModifiedDate = getIfModifiedSinceHeader(request);
if (!user.getAvatarLastModificationTime().isAfter(ifModifiedDate.getTime())) {
response.setStatus(HttpServletResponse.SC_NOT_MODIFIED);
} else {
byte[] avatar = user.getAvatar();
response.setContentType("image/jpeg");
response.setContentLength(avatar.length);
Date avatarLastModificationDate = new Date(
user.getAvatarLastModificationTime().getMillis());
setupAvatarHeaders(response, avatarLastModificationDate);
response.getOutputStream().write(avatar);
}
}
/**
* Checks the 'If-Modified-Since' header in the request and converts it to
* {@link java.util.Date} representation
* @param request - HTTP request
* @return If-Modified-Since header or Jan 1, 1970 if it is not set or
* can't be parsed
*/
private Date getIfModifiedSinceHeader(HttpServletRequest request) {
String ifModifiedSinceHeader = request.getHeader(IF_MODIFIED_SINCE_HEADER);
Date ifModifiedSinceDate = new Date(0);
if (ifModifiedSinceHeader != null) {
try {
DateFormat dateFormat = new SimpleDateFormat(
HTTP_HEADER_DATETIME_PATTERN,
Locale.US);
ifModifiedSinceDate = dateFormat.parse(ifModifiedSinceHeader);
} catch (ParseException e) {
// the header could not be parsed; keep the default of Jan 1, 1970.
}
}
return ifModifiedSinceDate;
}
/**
* Sets up avatar cache related headers.
* @param response - HTTP response object where set headers
* @param avatarLastModificationTime - last modification time of avatar
*/
private void setupAvatarHeaders(HttpServletResponse response,
Date avatarLastModificationTime) {
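// Force revalidation on every request (max-age=0, Expires set to now) while exposing
// Last-Modified so follow-up requests can be answered with 304 Not Modified.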
response.setHeader("Pragma", "public");
response.setHeader("Cache-Control", "public");
response.addHeader("Cache-Control", "must-revalidate");
response.addHeader("Cache-Control","max-age=0");
String formattedDateExpires = DateFormatUtils.format(
new Date(System.currentTimeMillis()),
HTTP_HEADER_DATETIME_PATTERN, Locale.US);
response.setHeader("Expires", formattedDateExpires);
String formattedDateLastModified = DateFormatUtils.format(
avatarLastModificationTime,
HTTP_HEADER_DATETIME_PATTERN, Locale.US);
response.setHeader("Last-Modified", formattedDateLastModified);
}
/**
* Prepare response with default user avatar
*
* @return JSON string with default user avatar
* @throws ImageProcessException due to common avatar processing error
* @throws IOException defined in the JsonFactory implementation, caller must implement exception processing
*/
@RequestMapping(value = "/defaultAvatar", method = RequestMethod.GET)
@ResponseBody
public String getDefaultAvatar() throws ImageProcessException, IOException {
Map<String, String> responseContent = new HashMap<String, String>();
prepareNormalResponse(avatarService.getDefaultAvatar(), responseContent);
return jsonUtils.prepareJSONString(responseContent);
}
/**
* Prepare valid response after avatar processing
*
* @param file file, that contains uploaded image
* @param responseHeaders response HTTP headers
* @param responseContent response content
* @return ResponseEntity with avatar processing results
* @throws IOException defined in the JsonFactory implementation, caller must implement exception processing
* @throws ImageProcessException if error occurred while image processing
*/
private ResponseEntity<String> prepareResponse(MultipartFile file,
HttpHeaders responseHeaders,
Map<String, String> responseContent)
throws IOException, ImageProcessException {
avatarService.validateAvatarFormat(file);
byte[] bytes = file.getBytes();
avatarService.validateAvatarSize(bytes);
prepareNormalResponse(bytes, responseContent);
String body = jsonUtils.prepareJSONString(responseContent);
return new ResponseEntity<String>(body, responseHeaders, HttpStatus.OK);
}
/**
* Prepare valid response after avatar processing
*
* @param bytes input avatar data
* @param response resulting response
* @param responseContent with avatar processing results
* @throws ImageProcessException if it's impossible to form correct image response
*/
private void prepareResponse(byte[] bytes,
HttpServletResponse response,
Map<String, String> responseContent) throws ImageProcessException {
avatarService.validateAvatarFormat(bytes);
avatarService.validateAvatarSize(bytes);
prepareNormalResponse(bytes, responseContent);
response.setStatus(HttpServletResponse.SC_OK);
}
/**
* Prepares the normal (successful) response payload.
*
* @param bytes input avatar data
* @param responseContent response payload
* @throws ImageProcessException due to common avatar processing error
*/
private void prepareNormalResponse(byte[] bytes,
Map<String, String> responseContent) throws ImageProcessException {
String srcImage = avatarService.convertBytesToBase64String(bytes);
responseContent.put(RESULT, "true");
responseContent.put("srcPrefix", ImageUtils.HTML_SRC_TAG_PREFIX);
responseContent.put("srcImage", srcImage);
}
/**
* Handles an exception that is thrown when the avatar has incorrect size.
*
* @param e exception
* @param locale locale, it's needed for error message localization
* @return DTO, that contains information about error, it will be converted to JSON
*/
@ExceptionHandler(value = ImageSizeException.class)
@ResponseBody
public OperationResultDto handleImageSizeException(ImageSizeException e, Locale locale) {
Object[] parameters = new Object[]{e.getMaxSize()};
String errorMessage = messageSource.getMessage(WRONG_SIZE_RESOURCE_MESSAGE, parameters, locale);
return new OperationResultDto(errorMessage);
}
/**
* Handles an exception that is thrown when the avatar has incorrect format.
*
* @param e exception
* @param locale locale, it's needed for error message localization
* @return DTO, that contains information about error, it will be converted to JSON
*/
@ExceptionHandler(value = ImageFormatException.class)
@ResponseBody
public OperationResultDto handleImageFormatException(ImageFormatException e, Locale locale) {
String errorMessage = messageSource.getMessage(WRONG_FORMAT_RESOURCE_MESSAGE, null, locale);
return new OperationResultDto(errorMessage);
}
/**
* Handles common exception that can occur when loading an avatar.
*
* @param e exception
* @param locale locale, it's needed for error message localization
* @return DTO, that contains information about error, it will be converted to JSON
*/
@ExceptionHandler(value = ImageProcessException.class)
@ResponseBody
public OperationResultDto handleImageProcessException(ImageProcessException e, Locale locale) {
String errorMessage = messageSource.getMessage(COMMON_ERROR_RESOURCE_MESSAGE, null, locale);
return new OperationResultDto(errorMessage);
}
}
|
package fi.otavanopisto.kuntaapi.server.rest;
import fi.otavanopisto.kuntaapi.server.rest.model.BadRequest;
import fi.otavanopisto.kuntaapi.server.rest.model.Service;
import fi.otavanopisto.kuntaapi.server.rest.model.Forbidden;
import fi.otavanopisto.kuntaapi.server.rest.model.NotImplemented;
import fi.otavanopisto.kuntaapi.server.rest.model.InternalServerError;
import fi.otavanopisto.kuntaapi.server.rest.model.NotFound;
import fi.otavanopisto.kuntaapi.server.rest.model.Event;
import fi.otavanopisto.kuntaapi.server.rest.model.Attachment;
import fi.otavanopisto.kuntaapi.server.rest.model.Organization;
import fi.otavanopisto.kuntaapi.server.rest.model.ServiceClass;
import fi.otavanopisto.kuntaapi.server.rest.model.ServiceElectronicChannel;
import javax.ws.rs.*;
import javax.ws.rs.core.Response;
import io.swagger.annotations.*;
import java.util.List;
@Path("/organizations")
@Api(description = "the organizations API")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@javax.annotation.Generated(value = "class io.swagger.codegen.languages.JavaJAXRSSpecServerCodegen", date = "2016-09-07T12:53:04.993+03:00")
public abstract class OrganizationsApi extends AbstractApi {
@POST
@Path("/{organizationId}/services")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "Create a service", notes = "Creates new service for the organization ", response = Service.class, responseContainer = "List", tags={ "Services", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "An array of services", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 404, message = "Not found", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 500, message = "Internal server error", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 501, message = "Returned when selected service does support modification of data", response = Service.class, responseContainer = "List") })
public abstract Response createService(@PathParam("organizationId") String organizationId,Service body);
@DELETE
@Path("/{organizationId}/services/{serviceId}")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "Delete a service", notes = "Delete a single municipal service ", response = void.class, tags={ "Services", })
@ApiResponses(value = {
@ApiResponse(code = 204, message = "Empty response indicating a succesfull removal", response = void.class),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = void.class),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = void.class),
@ApiResponse(code = 404, message = "Not found", response = void.class),
@ApiResponse(code = 500, message = "Internal server error", response = void.class) })
public abstract Response deleteService(@PathParam("organizationId") String organizationId,@PathParam("serviceId") String serviceId);
@GET
@Path("/{organizationId}/events/{eventId}")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "Returns organizations event by id", notes = "Returns organizations event by id ", response = Event.class, tags={ "Events", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Returns a single event", response = Event.class),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Event.class),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Event.class),
@ApiResponse(code = 500, message = "Internal server error", response = Event.class) })
public abstract Response findOrganizationEvent(@PathParam("organizationId") String organizationId,@PathParam("eventId") String eventId);
@GET
@Path("/{organizationId}/events/{eventId}/images/{imageId}")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "Returns an event image", notes = "Returns an event image ", response = Attachment.class, tags={ "Events", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Returns an event image", response = Attachment.class),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Attachment.class),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Attachment.class),
@ApiResponse(code = 500, message = "Internal server error", response = Attachment.class) })
public abstract Response findOrganizationEventImage(@PathParam("organizationId") String organizationId,@PathParam("eventId") String eventId,@PathParam("imageId") String imageId);
@GET
@Path("/{organizationId}/services/{serviceId}")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "Find a service by id", notes = "Returns single service by it's unique id. ", response = Service.class, tags={ "Services", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Returns a single municipal service", response = Service.class),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Service.class),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Service.class),
@ApiResponse(code = 404, message = "Not found", response = Service.class),
@ApiResponse(code = 500, message = "Internal server error", response = Service.class) })
public abstract Response findService(@PathParam("organizationId") String organizationId,@PathParam("serviceId") String serviceId);
@GET
@Path("/{organizationId}/events/{eventId}/images/{imageId}/data")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/octet-stream" })
@ApiOperation(value = "Returns an event image data", notes = "Returns an event image data ", response = byte[].class, tags={ "Events", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Returns an event image data", response = byte[].class),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = byte[].class),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = byte[].class),
@ApiResponse(code = 500, message = "Internal server error", response = byte[].class) })
public abstract Response getOrganizationEventImageData(@PathParam("organizationId") String organizationId,@PathParam("eventId") String eventId,@PathParam("imageId") String imageId,@QueryParam("size") Integer size);
@GET
@Path("/{organizationId}/events/{eventId}/images")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "Returns list of event images", notes = "Returns list of event images ", response = Attachment.class, responseContainer = "List", tags={ "Events", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Returns a list of event images", response = Attachment.class, responseContainer = "List"),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Attachment.class, responseContainer = "List"),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Attachment.class, responseContainer = "List"),
@ApiResponse(code = 500, message = "Internal server error", response = Attachment.class, responseContainer = "List") })
public abstract Response listOrganizationEventImages(@PathParam("organizationId") String organizationId,@PathParam("eventId") String eventId);
@GET
@Path("/{organizationId}/events")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "Lists organizations events", notes = "Lists organizations events ", response = Event.class, responseContainer = "List", tags={ "Events", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Returns a list of events", response = Event.class, responseContainer = "List"),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Event.class, responseContainer = "List"),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Event.class, responseContainer = "List"),
@ApiResponse(code = 500, message = "Internal server error", response = Event.class, responseContainer = "List") })
public abstract Response listOrganizationEvents(@PathParam("organizationId") String organizationId,@QueryParam("startBefore") String startBefore,@QueryParam("startAfter") String startAfter,@QueryParam("endBefore") String endBefore,@QueryParam("endAfter") String endAfter,@QueryParam("firstResult") Integer firstResult,@QueryParam("maxResults") Integer maxResults,@QueryParam("orderBy") String orderBy,@QueryParam("orderDir") String orderDir);
@GET
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "List organizations", notes = "List organizations", response = Organization.class, responseContainer = "List", tags={ "Organizations", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "An array of organizations", response = Organization.class, responseContainer = "List"),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Organization.class, responseContainer = "List"),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Organization.class, responseContainer = "List"),
@ApiResponse(code = 500, message = "Internal server error", response = Organization.class, responseContainer = "List") })
public abstract Response listOrganizations(@QueryParam("businessName") String businessName,@QueryParam("businessCode") String businessCode);
@GET
@Path("/{organizationId}/serviceClasses/")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "List service classes for an organization", notes = "Returns list of organization's service classes ", response = ServiceClass.class, responseContainer = "List", tags={ "Services", "Service Categories", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Returns a list of organization's service classes", response = ServiceClass.class, responseContainer = "List"),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = ServiceClass.class, responseContainer = "List"),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = ServiceClass.class, responseContainer = "List"),
@ApiResponse(code = 404, message = "Not found", response = ServiceClass.class, responseContainer = "List"),
@ApiResponse(code = 500, message = "Internal server error", response = ServiceClass.class, responseContainer = "List") })
public abstract Response listServiceClasses(@PathParam("organizationId") String organizationId);
@GET
@Path("/{organizationId}/services/{serviceId}/electronicChannels")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "List service electornic channels", notes = "Lists service electronic channels ", response = ServiceElectronicChannel.class, tags={ "Services", "ServiceChannels", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "List of service electornic channels", response = ServiceElectronicChannel.class),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = ServiceElectronicChannel.class),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = ServiceElectronicChannel.class),
@ApiResponse(code = 404, message = "Not found", response = ServiceElectronicChannel.class),
@ApiResponse(code = 500, message = "Internal server error", response = ServiceElectronicChannel.class) })
public abstract Response listServiceElectornicChannels(@PathParam("organizationId") String organizationId,@PathParam("serviceId") String serviceId);
@GET
@Path("/{organizationId}/services")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "List services", notes = "Lists organization's services ", response = Service.class, responseContainer = "List", tags={ "Services", })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "An array of services", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 404, message = "Not found", response = Service.class, responseContainer = "List"),
@ApiResponse(code = 500, message = "Internal server error", response = Service.class, responseContainer = "List") })
public abstract Response listServices(@PathParam("organizationId") String organizationId,@QueryParam("serviceClassId") String serviceClassId);
@PUT
@Path("/{organizationId}/services/{serviceId}")
@Consumes({ "application/json;charset=utf-8" })
@Produces({ "application/json;charset=utf-8" })
@ApiOperation(value = "Update a service", notes = "Updates a single municipal service ", response = Service.class, tags={ "Services" })
@ApiResponses(value = {
@ApiResponse(code = 200, message = "Created service", response = Service.class),
@ApiResponse(code = 400, message = "Invalid request was sent to the server", response = Service.class),
@ApiResponse(code = 403, message = "Attempted to make a call with unauthorized client", response = Service.class),
@ApiResponse(code = 404, message = "Not found", response = Service.class),
@ApiResponse(code = 500, message = "Internal server error", response = Service.class),
@ApiResponse(code = 501, message = "Returned when endpoint does support modification of data", response = Service.class) })
public abstract Response updateService(@PathParam("organizationId") String organizationId,@PathParam("serviceId") String serviceId);
}
|
package com.matthewtamlin.spyglass.library.default_annotations;
import com.matthewtamlin.java_utilities.testing.Tested;
import com.matthewtamlin.spyglass.library.default_adapters.DefaultToDimensionResourceAdapter;
import com.matthewtamlin.spyglass.library.meta_annotations.Default;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Defines a default for the annotated method, so that the Spyglass framework can invoke the
* method if its handler annotation is not satisfied. This annotation should only be applied to
* methods which satisfy all of the following criteria:
* <ul>
* <li>The method has a handler annotation.</li>
* <li>The method has no other default annotations.</li>
* <li>The method has at least one integer parameter.</li>
* <li>One integer parameter has no use annotation.</li>
* <li>Every other parameter has a use annotation.</li>
* </ul>
* <p>
* The dimension value is converted to units of pixels before being passed to the method.
*/
@Tested(testMethod = "automated")
@Default(adapterClass = DefaultToDimensionResourceAdapter.class)
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface DefaultToDimensionResource {
/**
* @return the resource ID of the default value, must resolve to a dimension resource
*/
int value();
}
|
package org.mifosplatform.portfolio.search.service;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collection;
import org.mifosplatform.infrastructure.core.domain.JdbcSupport;
import org.mifosplatform.infrastructure.core.service.TenantAwareRoutingDataSource;
import org.mifosplatform.infrastructure.security.service.PlatformSecurityContext;
import org.mifosplatform.portfolio.search.data.SearchConditions;
import org.mifosplatform.portfolio.search.data.SearchData;
import org.mifosplatform.useradministration.domain.AppUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.stereotype.Service;
@Service
public class SearchReadPlatformServiceImpl implements SearchReadPlatformService {
private final NamedParameterJdbcTemplate namedParameterjdbcTemplate;
private final PlatformSecurityContext context;
@Autowired
public SearchReadPlatformServiceImpl(final PlatformSecurityContext context, final TenantAwareRoutingDataSource dataSource) {
this.context = context;
this.namedParameterjdbcTemplate = new NamedParameterJdbcTemplate(dataSource);
}
@Override
public Collection<SearchData> retriveMatchingData(final SearchConditions searchConditions) {
AppUser currentUser = context.authenticatedUser();
String hierarchy = currentUser.getOffice().getHierarchy();
SearchMapper rm = new SearchMapper();
MapSqlParameterSource params = new MapSqlParameterSource();
params.addValue("hierarchy", hierarchy + "%");
params.addValue("search", searchConditions.getSearchQuery());
params.addValue("partialSearch", "%" + searchConditions.getSearchQuery() + "%");
return this.namedParameterjdbcTemplate.query(rm.searchSchema(searchConditions), params, rm);
}
private static final class SearchMapper implements RowMapper<SearchData> {
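/**
 * Builds a UNION ALL query over clients, loans, client identifiers and groups for the requested
 * entity types, listing exact matches first and partial (LIKE) matches second, each restricted
 * to the caller's office hierarchy.
 */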
public String searchSchema(final SearchConditions searchConditions) {
String union = " union all ";
String clientExactMatchSql = " (select 'CLIENT' as entityType, c.id as entityId, c.display_name as entityName, c.external_id as entityExternalId, c.account_no as entityAccountNo "
+ " , c.office_id as parentId, o.name as parentName "
+ " from m_client c join m_office o on o.id = c.office_id where o.hierarchy like :hierarchy and (c.account_no like :search or c.display_name like :search or c.external_id like :search)) ";
String clientMatchSql = " (select 'CLIENT' as entityType, c.id as entityId, c.display_name as entityName, c.external_id as entityExternalId, c.account_no as entityAccountNo "
+ " , c.office_id as parentId, o.name as parentName "
+ " from m_client c join m_office o on o.id = c.office_id where o.hierarchy like :hierarchy and (c.account_no like :partialSearch and c.account_no not like :search) or "
+ "(c.display_name like :partialSearch and c.display_name not like :search) or "
+ "(c.external_id like :partialSearch and c.external_id not like :search))";
String loanExactMatchSql = " (select 'LOAN' as entityType, l.id as entityId, pl.name as entityName, l.external_id as entityExternalId, l.account_no as entityAccountNo "
+ " , c.id as parentId, c.display_name as parentName "
+ " from m_loan l join m_client c on l.client_id = c.id join m_office o on o.id = c.office_id join m_product_loan pl on pl.id=l.product_id where o.hierarchy like :hierarchy and l.account_no like :search) ";
String loanMatchSql = " (select 'LOAN' as entityType, l.id as entityId, pl.name as entityName, l.external_id as entityExternalId, l.account_no as entityAccountNo "
+ " , c.id as parentId, c.display_name as parentName "
+ " from m_loan l join m_client c on l.client_id = c.id join m_office o on o.id = c.office_id join m_product_loan pl on pl.id=l.product_id where o.hierarchy like :hierarchy and l.account_no like :partialSearch and l.account_no not like :search) ";
String clientIdentifierExactMatchSql = " (select 'CLIENTIDENTIFIER' as entityType, ci.id as entityId, ci.document_key as entityName, "
+ " null as entityExternalId, null as entityAccountNo, c.id as parentId, c.display_name as parentName "
+ " from m_client_identifier ci join m_client c on ci.client_id=c.id join m_office o on o.id = c.office_id "
+ " where o.hierarchy like :hierarchy and ci.document_key like :search) ";
String clientIdentifierMatchSql = " (select 'CLIENTIDENTIFIER' as entityType, ci.id as entityId, ci.document_key as entityName, "
+ " null as entityExternalId, null as entityAccountNo, c.id as parentId, c.display_name as parentName "
+ " from m_client_identifier ci join m_client c on ci.client_id=c.id join m_office o on o.id = c.office_id "
+ " where o.hierarchy like :hierarchy and ci.document_key like :partialSearch and ci.document_key not like :search) ";
String groupExactMatchSql = " (select IF(g.level_id=1,'CENTER','GROUP') as entityType, g.id as entityId, g.display_name as entityName, g.external_id as entityExternalId, NULL as entityAccountNo "
+ " , g.office_id as parentId, o.name as parentName "
+ " from m_group g join m_office o on o.id = g.office_id where o.hierarchy like :hierarchy and g.display_name like :search) ";
String groupMatchSql = " (select IF(g.level_id=1,'CENTER','GROUP') as entityType, g.id as entityId, g.display_name as entityName, g.external_id as entityExternalId, NULL as entityAccountNo "
+ " , g.office_id as parentId, o.name as parentName "
+ " from m_group g join m_office o on o.id = g.office_id where o.hierarchy like :hierarchy and g.display_name like :partialSearch and g.display_name not like :search) ";
StringBuffer sql = new StringBuffer();
// first include all exact matches
if (searchConditions.isClientSearch()) {
sql.append(clientExactMatchSql).append(union);
}
if (searchConditions.isLoanSeach()) {
sql.append(loanExactMatchSql).append(union);
}
if(searchConditions.isClientIdentifierSearch()){
sql.append(clientIdentifierExactMatchSql).append(union);
}
if (searchConditions.isGroupSearch()) {
sql.append(groupExactMatchSql).append(union);
}
// include all matching records
if (searchConditions.isClientSearch()) {
sql.append(clientMatchSql).append(union);
}
if (searchConditions.isLoanSeach()) {
sql.append(loanMatchSql).append(union);
}
if(searchConditions.isClientIdentifierSearch()){
sql.append(clientIdentifierMatchSql).append(union);
}
if (searchConditions.isGroupSearch()) {
sql.append(groupMatchSql).append(union);
}
// remove the trailing "union all"
sql.replace(sql.lastIndexOf(union), sql.length(), "");
return sql.toString();
}
@Override
public SearchData mapRow(final ResultSet rs, @SuppressWarnings("unused") final int rowNum) throws SQLException {
final Long entityId = JdbcSupport.getLong(rs, "entityId");
final String entityAccountNo = rs.getString("entityAccountNo");
final String entityExternalId = rs.getString("entityExternalId");
final String entityName = rs.getString("entityName");
final String entityType = rs.getString("entityType");
final Long parentId = JdbcSupport.getLong(rs, "parentId");
final String parentName = rs.getString("parentName");
return new SearchData(entityId, entityAccountNo, entityExternalId, entityName, entityType, parentId, parentName);
}
}
}
|
package org.opencps.auth.security.authverifier;
import java.util.Properties;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.HttpMethod;
import org.osgi.service.component.annotations.Component;
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import com.liferay.portal.kernel.model.User;
import com.liferay.portal.kernel.security.auth.AccessControlContext;
import com.liferay.portal.kernel.security.auth.AuthException;
import com.liferay.portal.kernel.security.auth.AuthTokenUtil;
import com.liferay.portal.kernel.security.auth.http.HttpAuthManagerUtil;
import com.liferay.portal.kernel.security.auth.http.HttpAuthorizationHeader;
import com.liferay.portal.kernel.security.auth.verifier.AuthVerifier;
import com.liferay.portal.kernel.security.auth.verifier.AuthVerifierResult;
import com.liferay.portal.kernel.security.auto.login.AutoLoginException;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.MapUtil;
import com.liferay.portal.kernel.util.PortalUtil;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.kernel.util.StringUtil;
import com.liferay.portal.kernel.util.Validator;
import com.liferay.portal.security.auto.login.basic.auth.header.BasicAuthHeaderAutoLogin;
@Component(immediate = true, property = {
"auth.verifier.OpenCPSAuthHeaderAuthVerifier.urls.includes=*"
})
public class OpenCPSAuthHeaderAuthVerifier extends BasicAuthHeaderAutoLogin
implements AuthVerifier {
private static final String AUTHORIZATION_HEADER = "Authorization";
// private static final String TOKEN_HEADER = "X-CSRF-Token";
private static final String TOKEN_HEADER = "Token";
@Override
protected String[] doLogin(
HttpServletRequest request, HttpServletResponse response)
throws Exception {
long companyId = PortalUtil.getCompanyId(request);
if (!isEnabled(companyId)) {
return null;
}
String token = request.getHeader(TOKEN_HEADER);
String[] credentials = new String[3];
if (Validator.isNotNull(token)) {
String authToken = AuthTokenUtil.getToken(PortalUtil.getOriginalServletRequest(request));
if (authToken == null || (authToken != null && !authToken.equals(token))) {
return null;
}
User u = PortalUtil.getUser(request);
if (u != null) {
credentials[0] = String.valueOf(u.getUserId());
credentials[2] = Boolean.TRUE.toString();
}
}
else {
String authorization = request.getHeader(AUTHORIZATION_HEADER);
if (Validator.isNotNull(authorization)) {
String[] schemaData =
StringUtil.split(authorization, StringPool.SPACE);
if (schemaData == null || schemaData.length != 2) {
return null;
}
HttpAuthorizationHeader httpAuthorizationHeader =
HttpAuthManagerUtil.parse(request);
if (httpAuthorizationHeader == null) {
return null;
}
String scheme = httpAuthorizationHeader.getScheme();
// We only handle HTTP Basic authentication
if (!StringUtil.equalsIgnoreCase(
scheme, HttpAuthorizationHeader.SCHEME_BASIC)) {
return null;
}
long userId =
HttpAuthManagerUtil.getUserId(request, httpAuthorizationHeader);
if (userId <= 0) {
throw new AuthException();
}
credentials[0] = String.valueOf(userId);
credentials[1] = httpAuthorizationHeader.getAuthParameter(
HttpAuthorizationHeader.AUTH_PARAMETER_NAME_PASSWORD);
credentials[2] = Boolean.TRUE.toString();
}
else {
//Check if GET method
if (request.getMethod().equals(HttpMethod.GET)) {
User u = PortalUtil.getUser(request);
if (u != null) {
credentials[0] = String.valueOf(u.getUserId());
credentials[2] = Boolean.TRUE.toString();
}
}
else {
return null;
}
}
}
return credentials;
}
@Override
public String getAuthType() {
return HttpServletRequest.BASIC_AUTH;
}
@Override
public AuthVerifierResult verify(AccessControlContext accessControlContext, Properties properties)
throws AuthException {
try {
AuthVerifierResult authVerifierResult = new AuthVerifierResult();
String[] credentials = login(
accessControlContext.getRequest(),
accessControlContext.getResponse());
if (credentials != null) {
authVerifierResult.setPassword(credentials[1]);
authVerifierResult.setPasswordBasedAuthentication(true);
authVerifierResult.setState(AuthVerifierResult.State.SUCCESS);
authVerifierResult.setUserId(Long.valueOf(credentials[0]));
}
else {
boolean forcedBasicAuth = MapUtil.getBoolean(
accessControlContext.getSettings(), "basic_auth");
if (!forcedBasicAuth) {
forcedBasicAuth = GetterUtil.getBoolean(
properties.getProperty("basic_auth"));
}
if (forcedBasicAuth) {
HttpAuthorizationHeader httpAuthorizationHeader =
new HttpAuthorizationHeader(
HttpAuthorizationHeader.SCHEME_BASIC);
HttpAuthManagerUtil.generateChallenge(
accessControlContext.getRequest(),
accessControlContext.getResponse(),
httpAuthorizationHeader);
authVerifierResult.setState(
AuthVerifierResult.State.INVALID_CREDENTIALS);
}
else {
authVerifierResult.setState(
AuthVerifierResult.State.INVALID_CREDENTIALS);
}
}
return authVerifierResult;
}
catch (AutoLoginException ale) {
throw new AuthException(ale);
}
}
@Override
protected boolean isEnabled(long companyId) {
return true;
}
private Log _log =
LogFactoryUtil.getLog(OpenCPSAuthHeaderAuthVerifier.class.getName());
}
|
package com.intellij.openapi.diff.impl;
import junit.framework.TestCase;
public class IgnoreWhiteSpaceTest extends TestCase {
private ComparisonPolicy myPolicy;
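// Presumably disabled on purpose by renaming ("t_estTrim" is not picked up as a test); kept for reference.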
public void t_estTrim() {
myPolicy = ComparisonPolicy.TRIM_SPACE;
Object[] keys = myPolicy.getLineWrappers(new String[]{"a b", " a b ", "\ta b", "a b"});
assertEquals(keys[0], keys[1]);
assertEquals(keys[1], keys[2]);
assertFalse(keys[2].equals(keys[3]));
keys = myPolicy.getWrappers(new String[]{" a b", " a b ", " a b \n", "\ta b", "\n", " "});
assertEquals(keys[0], keys[3]);
assertFalse(keys[0].equals(keys[1]));
assertEquals(" a b", keys[2]);
assertEquals("", keys[4]);
assertEquals("", keys[5]);
}
public void testIgnore() {
myPolicy = ComparisonPolicy.IGNORE_SPACE;
Object[] keys = myPolicy.getLineWrappers(new String[]{"a b", " a b", " a b ", "ab", " b a"});
assertEquals(keys[0], keys[1]);
assertEquals(keys[1], keys[2]);
assertEquals(keys[2], keys[3]);
assertFalse(keys[1].equals(keys[4]));
keys = myPolicy.getWrappers(new String[]{" ", " ", "\t\n", "a"});
assertEquals(keys[0], keys[1]);
assertEquals(keys[1], keys[2]);
assertFalse(keys[2].equals(keys[3]));
}
}
|
package com.esri.geoevent.processor.motioncalculator;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.esri.ges.core.Uri;
import com.esri.ges.core.component.ComponentException;
import com.esri.ges.core.geoevent.DefaultFieldDefinition;
import com.esri.ges.core.geoevent.FieldDefinition;
import com.esri.ges.core.geoevent.FieldType;
import com.esri.ges.core.geoevent.GeoEvent;
import com.esri.ges.core.geoevent.GeoEventDefinition;
import com.esri.ges.core.geoevent.GeoEventPropertyName;
import com.esri.ges.core.validation.ValidationException;
import com.esri.ges.manager.geoeventdefinition.GeoEventDefinitionManager;
import com.esri.ges.manager.geoeventdefinition.GeoEventDefinitionManagerException;
import com.esri.ges.messaging.EventDestination;
import com.esri.ges.messaging.EventProducer;
import com.esri.ges.messaging.EventUpdatable;
import com.esri.ges.messaging.GeoEventCreator;
import com.esri.ges.messaging.GeoEventProducer;
import com.esri.ges.messaging.Messaging;
import com.esri.ges.messaging.MessagingException;
import com.esri.ges.processor.GeoEventProcessorBase;
import com.esri.ges.processor.GeoEventProcessorDefinition;
import com.esri.ges.spatial.Geometry;
import com.esri.ges.spatial.Point;
import com.esri.ges.spatial.Polyline;
import com.esri.ges.spatial.Spatial;
import com.esri.ges.util.Converter;
import com.esri.ges.util.Validator;
public class MotionCalculator extends GeoEventProcessorBase implements EventProducer, EventUpdatable
{
private static final Log log = LogFactory.getLog(MotionCalculator.class);
private Spatial spatial;
private MotionCalculatorNotificationMode notificationMode;
private long reportInterval;
private final Map<String, MotionElements> motionElementsCache = new ConcurrentHashMap<String, MotionElements>();
private Messaging messaging;
private GeoEventCreator geoEventCreator;
private GeoEventProducer geoEventProducer;
private EventDestination destination;
private String distanceUnit;
private String geometryType;
private String predictiveGeometryType;
private Integer predictiveTimespan;
private Date resetTime;
private boolean autoResetCache;
private Timer clearCacheTimer;
private boolean clearCache;
private Uri definitionUri;
private String definitionUriString;
private boolean isReporting = false;
private GeoEventDefinitionManager geoEventDefinitionManager;
private Map<String, String> edMapper = new ConcurrentHashMap<String, String>();
private String newGeoEventDefinitionName;
final Object lock1 = new Object();
class MotionElements
{
private GeoEvent previousGeoEvent;
private GeoEvent currentGeoEvent;
private String id;
private Geometry lineGeometry;
private Double distance = 0.0; // distance defaults to KMs but may change to miles based on the distance unit
private Double height = 0.0;
private Double slope = 0.0;
private Double timespanSeconds = 0.0;
private Double speed = 0.0;
private Double acceleration = 0.0; // distances per second square
private Double headingDegrees = 0.0;
private Double cumulativeDistance = 0.0;
private Double cumulativeHeight = 0.0;
private Double cumulativeTimeSeconds = 0.0;
private Double minDistance = Double.MAX_VALUE;
private Double maxDistance = Double.MIN_VALUE;
private Double avgDistance = 0.0;
private Double minHeight = Double.MAX_VALUE;
private Double maxHeight = Double.MIN_VALUE;
private Double avgHeight = 0.0;
private Double minSpeed = Double.MAX_VALUE;
private Double maxSpeed = Double.MIN_VALUE;
private Double avgSpeed = 0.0;
private Double minAcceleration = Double.MAX_VALUE;
private Double maxAcceleration = Double.MIN_VALUE;
private Double avgAcceleration = 0.0;
private Double minTimespan = Double.MAX_VALUE;
private Double maxTimespan = Double.MIN_VALUE;
private Double avgTimespan = 0.0;
private Double minSlope = Double.MAX_VALUE;
private Double maxSlope = Double.MIN_VALUE;
private Double avgSlope = 0.0;
private Long count = 0L;
private Date predictiveTime;
public MotionElements(GeoEvent geoevent)
{
this.currentGeoEvent = geoevent;
System.out.println("MotionElements");
System.out.println(geoevent.toString());
}
public void setGeoEvent(GeoEvent geoevent)
{
System.out.println("setGeoEvent");
System.out.println(geoevent.toString());
this.previousGeoEvent = this.getCurrentGeoEvent();
this.currentGeoEvent = geoevent;
}
public Long getCount()
{
return count;
}
public Double getCumulativeDistance()
{
return cumulativeDistance;
}
public Double getCumulativeHeight()
{
return cumulativeHeight;
}
public Double getCumulativeTime()
{
return cumulativeTimeSeconds;
}
public Geometry getGeometry()
{
if (geometryType.equals("Point"))
{
//returns the original geometry -- don't care for type for now
return this.getCurrentGeoEvent().getGeometry();
}
else
{
return lineGeometry;
}
}
public void computeTimespan()
{
Long timespanMilliSecs = 0L;
timespanMilliSecs = getCurrentGeoEvent().getStartTime().getTime() - getPreviousGeoEvent().getStartTime().getTime();
timespanSeconds = timespanMilliSecs / 1000.0;
if (timespanSeconds == 0.0)
{
timespanSeconds = 0.0000000001; // set to a very small value to avoid division by zero
}
if (minTimespan > timespanSeconds)
{
minTimespan = timespanSeconds;
}
if (maxTimespan < timespanSeconds)
{
maxTimespan = timespanSeconds;
}
cumulativeTimeSeconds = cumulativeTimeSeconds + timespanSeconds;
if (count > 0)
{
avgTimespan = cumulativeTimeSeconds / count;
}
else
{
avgTimespan = cumulativeTimeSeconds;
}
}
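/**
 * Derives motion metrics from the previous and current events: distance (law of cosines,
 * converted to the configured unit), height difference, slope, speed, acceleration and heading,
 * plus running min/max/average values. Builds the connecting polyline and sends a report when
 * the notification mode is OnChange.
 */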
public void calculateAndSendReport()
{
if (this.previousGeoEvent == null) {
return;
}
count++;
//Need to compute timespan first
computeTimespan();
Point from = (Point) getPreviousGeoEvent().getGeometry();
Point to = (Point) getCurrentGeoEvent().getGeometry();
// distance = halversineDistance(from.getX(), from.getY(), to.getX(), to.getY());
distance = lawOfCosineDistance(from.getX(), from.getY(), to.getX(), to.getY());
height = to.getZ() - from.getZ(); // assuming the Z unit corresponds to the distance unit's domain, e.g. meters for KMs, feet for miles
slope = height / (distance * 1000.0); // make KM distance into meters
if ("Miles".equals(distanceUnit))
{
this.distance *= 0.621371; // Convert KMs to Miles -- will affect all subsequent calculations
slope = height / (distance * 5280.0 ); //make mile distance into feet
}
else if ("Nautical Miles".equals(distanceUnit))
{
this.distance *= 0.539957; // Convert KMs to Nautical Miles
slope = height / (distance * 6076.12); // make nautical mile distance into feet;
}
Double timespanHours = timespanSeconds / (3600.0);
Double newSpeed = distance / timespanHours;
acceleration = (newSpeed - speed) / timespanHours;
speed = newSpeed;
if (minDistance > distance)
{
minDistance = distance;
}
if (maxDistance < distance)
{
maxDistance = distance;
}
if (minHeight > height)
{
minHeight = height;
}
if (maxHeight < height)
{
maxHeight = height;
}
if (minSlope > slope)
{
minSlope = slope;
}
if (maxSlope < slope)
{
maxSlope = slope;
}
if (minSpeed > speed)
{
minSpeed = speed;
}
if (maxSpeed < speed)
{
maxSpeed = speed;
}
if (minAcceleration > acceleration)
{
minAcceleration = acceleration;
}
if (maxAcceleration < acceleration)
{
maxAcceleration = acceleration;
}
if (!Double.isNaN(distance))
{
cumulativeDistance += distance;
}
if (!Double.isNaN(height))
{
cumulativeHeight += height;
}
avgDistance = cumulativeDistance / count;
avgHeight = cumulativeHeight / count;
// avgSpeed = cumulativeDistance / (cumulativeTimeSeconds / 3600.0);
avgSpeed = avgDistance / avgTimespan;
avgAcceleration = avgSpeed / avgTimespan;
headingDegrees = heading(from.getX(), from.getY(), to.getX(), to.getY());
Polyline polyline = spatial.createPolyline();
polyline.startPath(from.getX(), from.getY(), Double.NaN);
polyline.lineTo(to.getX(), to.getY(), Double.NaN);
this.lineGeometry = polyline;
sendReport();
}
private void sendReport()
{
if (notificationMode != MotionCalculatorNotificationMode.OnChange)
{
return;
}
System.out.println("sendReport");
try
{
GeoEvent outGeoEvent = createMotionGeoEvent();
if (outGeoEvent == null)
{
System.out.println("outGeoEvent is null");
return;
}
System.out.print(outGeoEvent.toString());
send(outGeoEvent);
}
catch (MessagingException e)
{
log.error("Error sending update GeoEvent for " + id, e);
}
}
private GeoEvent createMotionGeoEvent()
{
GeoEventDefinition edOut;
GeoEvent geoEventOut = null;
try
{
edOut = lookupAndCreateEnrichedDefinition(this.currentGeoEvent.getGeoEventDefinition());
if (edOut == null)
{
System.out.println("edOut is null");
return null;
}
geoEventOut = geoEventCreator.create(edOut.getGuid(), new Object[] {getCurrentGeoEvent().getAllFields(), createMotionGeoEventFields(currentGeoEvent.getTrackId(), this)});
geoEventOut.setProperty(GeoEventPropertyName.TYPE, "event"); //need to use "event" instead of "message" otherwise the resulting GeoEvent will come back in the process() method
geoEventOut.setProperty(GeoEventPropertyName.OWNER_ID, getId());
geoEventOut.setProperty(GeoEventPropertyName.OWNER_URI, definition.getUri());
for (Map.Entry<GeoEventPropertyName, Object> property : getCurrentGeoEvent().getProperties())
{
if (!geoEventOut.hasProperty(property.getKey()))
{
geoEventOut.setProperty(property.getKey(), property.getValue());
}
}
}
catch (Exception e1)
{
e1.printStackTrace();
}
return geoEventOut;
}
public Date getTimestamp()
{
//Should this be the timestamp of the incoming geoevent or the calculated time?
return getCurrentGeoEvent().getStartTime();
}
public Double getDistance()
{
return distance;
}
public Double getTimespanSeconds()
{
return timespanSeconds;
}
public Double getSpeed()
{
return speed;
}
public Double getHeadingDegrees()
{
return headingDegrees;
}
public Double getMinDistance()
{
return minDistance;
}
public Double getMaxDistance()
{
return maxDistance;
}
public Double getAvgDistance()
{
return avgDistance;
}
public Double getMinSpeed()
{
return minSpeed;
}
public Double getMaxSpeed()
{
return maxSpeed;
}
public Double getAvgSpeed()
{
return avgSpeed;
}
public Double getMinTime()
{
return minTimespan;
}
public Double getAvgTime()
{
return avgTimespan;
}
public Double getMaxTime()
{
return maxTimespan;
}
public Double getMinAcceleration()
{
return minAcceleration;
}
public Double getAvgAcceleration()
{
return avgAcceleration;
}
public Double getMaxAcceleration()
{
return maxAcceleration;
}
public Double getAcceleration()
{
return acceleration;
}
public Date getPredictiveTime()
{
Long timespan = getCurrentGeoEvent().getStartTime().getTime() + (predictiveTimespan * 1000);
Date pt = new Date();
pt.setTime(timespan);
predictiveTime = pt;
return predictiveTime;
}
public Geometry getPredictiveGeometry()
{
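// Dead-reckoning projection: move the current point forward along the last computed heading
// by predictiveDistance, using the standard great-circle destination formulas
//   lat2 = asin( sin(lat1) * cos(d/R) + cos(lat1) * sin(d/R) * cos(theta) )
//   lon2 = lon1 + atan2( sin(theta) * sin(d/R) * cos(lat1), cos(d/R) - sin(lat1) * sin(lat2) )
// where d/R is the angular distance (distRatio below) and theta is headingDegrees in radians.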
final Double R = 6356752.3142 / 1000.0; // Radius of the Earth in km (polar radius)
double earthRadius = R;
double predictiveDistance = speed * (predictiveTimespan/ 3600.0); // seconds to hours
if ("Miles".equals(distanceUnit))
{
predictiveDistance *= 0.621371; // Convert KMs to Miles
earthRadius *= 0.621371;
}
else if ("Nautical Miles".equals(distanceUnit))
{
predictiveDistance *= 0.539957; // Convert KMs to Nautical Miles
earthRadius *= 0.539957;
}
if (notificationMode == MotionCalculatorNotificationMode.Continuous)
{
System.out.println("continuous prediction");
Date currentDate = new Date();
double timespanToCurrentTime = (currentDate.getTime() - getCurrentGeoEvent().getStartTime().getTime()) / 1000.0; // convert to seconds
predictiveDistance = speed * (timespanToCurrentTime / 3600.0); // seconds to hours
}
double distRatio = predictiveDistance / earthRadius;
double distRatioSine = Math.sin(distRatio);
double distRatioCosine = Math.cos(distRatio);
Point currentPoint = (Point)getCurrentGeoEvent().getGeometry();
double startLonRad = toRadians(currentPoint.getX());
double startLatRad = toRadians(currentPoint.getY());
double startLatCos = Math.cos(startLatRad);
double startLatSin = Math.sin(startLatRad);
double endLatRads = Math.asin((startLatSin * distRatioCosine) + (startLatCos * distRatioSine * Math.cos(toRadians(headingDegrees))));
double endLonRads = startLonRad + Math.atan2(Math.sin(toRadians(headingDegrees)) * distRatioSine * startLatCos,
distRatioCosine - startLatSin * Math.sin(endLatRads));
double newLat = toDegrees(endLatRads);
double newLong = toDegrees(endLonRads);
if (predictiveGeometryType.equals("Point"))
{
return spatial.createPoint(newLong, newLat, currentPoint.getZ(), 4326);
}
else
{
Polyline polyline = spatial.createPolyline();
polyline.startPath(currentPoint.getX(), currentPoint.getY(), currentPoint.getZ());
polyline.lineTo(newLong, newLat, currentPoint.getZ()); //TODO: calculate new Z from Slope
return polyline;
}
}
public GeoEvent getPreviousGeoEvent()
{
return previousGeoEvent;
}
public GeoEvent getCurrentGeoEvent()
{
return currentGeoEvent;
}
public Double getSlope()
{
return slope;
}
public void setSlope(Double slope)
{
this.slope = slope;
}
public Double getMinSlope()
{
return minSlope;
}
public void setMinSlope(Double minSlope)
{
this.minSlope = minSlope;
}
public Double getMaxSlope()
{
return maxSlope;
}
public void setMaxSlope(Double maxSlope)
{
this.maxSlope = maxSlope;
}
public Double getAvgSlope()
{
return avgSlope;
}
public void setAvgSlope(Double avgSlope)
{
this.avgSlope = avgSlope;
}
public Double getHeight()
{
return height;
}
public Double getMinHeight()
{
return minHeight;
}
public Double getAvgHeight()
{
return avgHeight;
}
public Double getMaxHeight()
{
return maxHeight;
}
}
class ClearCacheTask extends TimerTask
{
public void run()
{
// clear the cache only when both flags are enabled
if (autoResetCache && clearCache)
{
motionElementsCache.clear();
}
}
}
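/**
 * Periodic reporter: while the processor is started, wakes up every reportInterval milliseconds
 * and, when the notification mode is Continuous, re-emits the cached motion statistics for every
 * track currently held in motionElementsCache.
 */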
class ReportGenerator implements Runnable
{
private Long reportInterval = 5000L;
public ReportGenerator(Long reportInterval)
{
this.reportInterval = reportInterval;
}
@Override
public void run()
{
while (isReporting)
{
try
{
Thread.sleep(reportInterval);
if (notificationMode != MotionCalculatorNotificationMode.Continuous)
{
continue;
}
for (String trackId : motionElementsCache.keySet())
{
MotionElements motionEle = motionElementsCache.get(trackId);
try
{
GeoEvent outGeoEvent = motionEle.createMotionGeoEvent();
if (outGeoEvent == null)
{
System.out.println("outGeoEvent is null");
continue;
}
System.out.print("send");
System.out.print(outGeoEvent.toString());
send(outGeoEvent);
}
catch (MessagingException e)
{
log.error("Error sending update GeoEvent for " + trackId, e);
}
}
}
catch (InterruptedException e1)
{
log.error(e1);
}
}
}
}
protected MotionCalculator(GeoEventProcessorDefinition definition) throws ComponentException
{
super(definition);
}
public void afterPropertiesSet()
{
newGeoEventDefinitionName = getProperty("newGeoEventDefinitionName").getValueAsString();
distanceUnit = getProperty("distanceUnit").getValueAsString();
geometryType = getProperty("geometryType").getValueAsString();
notificationMode = Validator.validateEnum(MotionCalculatorNotificationMode.class, getProperty("notificationMode").getValueAsString(), MotionCalculatorNotificationMode.OnChange);
reportInterval = Converter.convertToInteger(getProperty("reportInterval").getValueAsString(), 10) * 1000;
autoResetCache = Converter.convertToBoolean(getProperty("autoResetCache").getValueAsString());
clearCache = Converter.convertToBoolean(getProperty("clearCache").getValueAsString());
predictiveGeometryType = getProperty("predictiveGeometryType").getValueAsString();
predictiveTimespan = Converter.convertToInteger(getProperty("predictiveTimespan").getValueAsString(), 10) * 1000; // convert to milliseconds
String[] resetTimeStr = getProperty("resetTime").getValueAsString().split(":");
// Build today's Date from the configured reset time (HH:MM:SS).
Calendar calendar = Calendar.getInstance();
calendar.set(Calendar.HOUR_OF_DAY, Integer.parseInt(resetTimeStr[0]));
calendar.set(Calendar.MINUTE, Integer.parseInt(resetTimeStr[1]));
calendar.set(Calendar.SECOND, Integer.parseInt(resetTimeStr[2]));
resetTime = calendar.getTime();
}
@Override
public void setId(String id)
{
System.out.print("setId " + id);
super.setId(id);
destination = new EventDestination(getId() + ":event");
geoEventProducer = messaging.createGeoEventProducer(destination.getName());
}
@Override
public GeoEvent process(GeoEvent geoevent) throws Exception
{
String trackId = geoevent.getTrackId();
MotionElements motionEle;
if (!motionElementsCache.containsKey(trackId))
{
motionEle = new MotionElements(geoevent);
}
else
{
motionEle = motionElementsCache.get(trackId);
motionEle.setGeoEvent(geoevent);
motionEle.calculateAndSendReport();
}
// Need to synchronize the Concurrent Map on write to avoid wrong counting
synchronized (lock1)
{
motionElementsCache.put(trackId, motionEle);
}
return null;
}
@Override
public List<EventDestination> getEventDestinations()
{
return Arrays.asList(destination);
}
@Override
public void validate() throws ValidationException
{
super.validate();
List<String> errors = new ArrayList<String>();
if (reportInterval <= 0)
errors.add("'" + definition.getName() + "' property 'reportInterval' is invalid.");
if (errors.size() > 0)
{
StringBuffer sb = new StringBuffer();
for (String message : errors)
sb.append(message).append("\n");
throw new ValidationException(this.getClass().getName() + " validation failed: " + sb.toString());
}
}
@Override
public void onServiceStart()
{
if (this.autoResetCache || this.clearCache)
{
if (clearCacheTimer == null)
{
// Schedule the first cache clear at the configured reset time, then repeat daily.
Calendar calendar1 = Calendar.getInstance();
calendar1.setTime(resetTime);
Date time1 = calendar1.getTime();
clearCacheTimer = new Timer();
Long dayInMilliSeconds = 60 * 60 * 24 * 1000L;
clearCacheTimer.scheduleAtFixedRate(new ClearCacheTask(), time1, dayInMilliSeconds);
}
motionElementsCache.clear();
}
isReporting = true;
if (definition != null)
{
definitionUri = definition.getUri();
definitionUriString = definitionUri.toString();
}
ReportGenerator reportGen = new ReportGenerator(reportInterval);
Thread t = new Thread(reportGen);
t.setName("MotionCalculator Report Generator");
t.start();
}
@Override
public void onServiceStop()
{
if (clearCacheTimer != null)
{
clearCacheTimer.cancel();
}
isReporting = false;
}
@Override
public void shutdown()
{
super.shutdown();
if (clearCacheTimer != null)
{
clearCacheTimer.cancel();
}
clearGeoEventDefinitionMapper();
}
@Override
public EventDestination getEventDestination()
{
return destination;
}
@Override
public void send(GeoEvent geoEvent) throws MessagingException
{
// Try to get it again
if (geoEventProducer == null)
{
destination = new EventDestination(getId() + ":event");
geoEventProducer = messaging.createGeoEventProducer(destination.getName());
}
if (geoEventProducer != null && geoEvent != null)
{
geoEventProducer.send(geoEvent);
}
}
public void setMessaging(Messaging messaging)
{
this.messaging = messaging;
geoEventCreator = messaging.createGeoEventCreator();
}
public void setSpatial(Spatial spatial)
{
this.spatial = spatial;
}
public void setGeoEventDefinitionManager(GeoEventDefinitionManager geoEventDefinitionManager)
{
this.geoEventDefinitionManager = geoEventDefinitionManager;
}
private List<FieldDefinition> createFieldDefinitionList()
{
List<FieldDefinition> fdsMC = new ArrayList<FieldDefinition>();
try
{
fdsMC.add(new DefaultFieldDefinition("distance", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("height", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("timespan", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("speed", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("heading", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("slope", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("minTimespan", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("maxTimespan", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("avgTimespan", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("minDistance", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("maxDistance", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("avgDistance", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("minHeight", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("maxHeight", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("avgHeight", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("minSpeed", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("maxSpeed", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("avgSpeed", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("minAcceleration", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("maxAcceleration", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("avgAcceleration", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("minSlope", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("maxSlope", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("avgSlope", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("cumulativeDistance", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("cumulativeHeight", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("cumulativeTime", FieldType.Double));
fdsMC.add(new DefaultFieldDefinition("calculatedAt", FieldType.Date));
fdsMC.add(new DefaultFieldDefinition("predictiveTime", FieldType.Date));
fdsMC.add(new DefaultFieldDefinition("predictivePosition", FieldType.Geometry));
}
catch (Exception e)
{
log.error("Failed to create motion field definitions.", e);
}
return fdsMC;
}
private Object[] createMotionGeoEventFields(String trackId, MotionElements motionElements)
{
List<Object> motionFieldList = new ArrayList<Object>();
motionFieldList.add(motionElements.getDistance());
motionFieldList.add(motionElements.getHeight());
motionFieldList.add(motionElements.getTimespanSeconds());
motionFieldList.add(motionElements.getSpeed());
motionFieldList.add(motionElements.getHeadingDegrees());
motionFieldList.add(motionElements.getSlope());
motionFieldList.add(motionElements.getMinTime());
motionFieldList.add(motionElements.getMaxTime());
motionFieldList.add(motionElements.getAvgTime());
motionFieldList.add(motionElements.getMinDistance());
motionFieldList.add(motionElements.getMaxDistance());
motionFieldList.add(motionElements.getAvgDistance());
motionFieldList.add(motionElements.getMinHeight());
motionFieldList.add(motionElements.getMaxHeight());
motionFieldList.add(motionElements.getAvgHeight());
motionFieldList.add(motionElements.getMinSpeed());
motionFieldList.add(motionElements.getMaxSpeed());
motionFieldList.add(motionElements.getAvgSpeed());
motionFieldList.add(motionElements.getMinAcceleration());
motionFieldList.add(motionElements.getMaxAcceleration());
motionFieldList.add(motionElements.getAvgAcceleration());
motionFieldList.add(motionElements.getMinSlope());
motionFieldList.add(motionElements.getMaxSlope());
motionFieldList.add(motionElements.getAvgSlope());
motionFieldList.add(motionElements.getCumulativeDistance());
motionFieldList.add(motionElements.getCumulativeHeight());
motionFieldList.add(motionElements.getCumulativeTime());
motionFieldList.add(motionElements.getTimestamp());
motionFieldList.add(motionElements.getPredictiveTime());
motionFieldList.add(motionElements.getPredictiveGeometry());
return motionFieldList.toArray();
}
synchronized private GeoEventDefinition lookupAndCreateEnrichedDefinition(GeoEventDefinition edIn) throws Exception
{
if (edIn == null)
{
System.out.println("edIn is null");
return null;
}
GeoEventDefinition edOut = edMapper.containsKey(edIn.getGuid()) ? geoEventDefinitionManager.getGeoEventDefinition(edMapper.get(edIn.getGuid())) : null;
if (edOut == null)
{
edOut = edIn.augment(createFieldDefinitionList());
edOut.setName(newGeoEventDefinitionName);
edOut.setOwner(getId());
geoEventDefinitionManager.addTemporaryGeoEventDefinition(edOut, newGeoEventDefinitionName.isEmpty());
edMapper.put(edIn.getGuid(), edOut.getGuid());
}
return edOut;
}
synchronized private void clearGeoEventDefinitionMapper()
{
if (!edMapper.isEmpty())
{
for (String guid : edMapper.values())
{
try
{
geoEventDefinitionManager.deleteGeoEventDefinition(guid);
}
catch (GeoEventDefinitionManagerException e)
{
// the definition may already have been removed; nothing else to do
}
}
edMapper.clear();
}
}
/*
* Returns distance in KMs.
*/
private static Double lawOfCosineDistance(Double lon1, Double lat1, Double lon2, Double lat2)
{
final Double R = 6356752.3142 / 1000.0; // Radious of the earth in km
Double radLon1 = toRadians(lon1);
Double radLat1 = toRadians(lat1);
Double radLon2 = toRadians(lon2);
Double radLat2 = toRadians(lat2);
return Math.acos(Math.sin(radLat1) * Math.sin(radLat2) + Math.cos(radLat1) * Math.cos(radLat2) * Math.cos(radLon2 - radLon1)) * R;
}
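// Illustrative sanity check (not part of the processor): two points on the equator that are
// 90 degrees of longitude apart lie a quarter great-circle apart, i.e. roughly
// (PI / 2) * 6356.75 km ~= 9,985 km with the polar radius used above:
//   Double quarterCircle = lawOfCosineDistance(0.0, 0.0, 90.0, 0.0);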
@SuppressWarnings("unused")
private static Double halversineDistance(Double lon1, Double lat1, Double lon2, Double lat2)
{
final Double R = 6356752.3142 / 1000.0; // Radious of the earth in km
Double latDistance = toRadians(lat2 - lat1);
Double lonDistance = toRadians(lon2 - lon1);
Double a = Math.sin(latDistance / 2.0) * Math.sin(latDistance / 2.0) + Math.cos(toRadians(lat1)) * Math.cos(toRadians(lat2)) * Math.sin(lonDistance / 2.0) * Math.sin(lonDistance / 2.0);
Double c = 2.0 * Math.atan2(Math.sqrt(a), Math.sqrt(1 - a));
Double distance = R * c;
return distance;
}
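// The haversine form above is numerically more stable than the law-of-cosines form for very
// short distances (acos of a value very close to 1.0 loses precision); both use the same
// polar-radius constant, so results are directly comparable.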
/*
* Returns heading in degrees
*/
private static Double heading(Double lon1, Double lat1, Double lon2, Double lat2)
{
Double radLon1 = toRadians(lon1);
Double radLat1 = toRadians(lat1);
Double radLon2 = toRadians(lon2);
Double radLat2 = toRadians(lat2);
Double y = Math.sin(radLon2 - radLon1) * Math.cos(radLat2);
Double x = Math.cos(radLat1) * Math.sin(radLat2) - Math.sin(radLat1) * Math.cos(radLat2) * Math.cos(radLon2 - radLon1);
/*
* Equivalent computation without Math.atan2():
*
* Double headingDegrees = 0.0;
* if (y > 0) {
*     if (x > 0) { headingDegrees = toDegrees(Math.atan(y / x)); }
*     if (x < 0) { headingDegrees = 180.0 - toDegrees(Math.atan(-y / x)); }
*     if (x == 0) { headingDegrees = 90.0; }
* }
* if (y < 0) {
*     if (x > 0) { headingDegrees = toDegrees(-Math.atan(-y / x)); }
*     if (x < 0) { headingDegrees = toDegrees(Math.atan(y / x)) - 180.0; }
*     if (x == 0) { headingDegrees = 270.0; }
* }
* if (y == 0) {
*     if (x > 0) { headingDegrees = 0.0; }
*     if (x < 0) { headingDegrees = 180.0; }
*     if (x == 0) { headingDegrees = Double.NaN; } // the 2 points are the same
* }
*/
Double headingDegrees = toDegrees(Math.atan2(y, x) % (2.0 * Math.PI));
return headingDegrees;
}
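// Quick sanity checks (illustrative only, not invoked by the processor):
//   heading(0.0, 0.0, 0.0, 10.0) -> ~0.0   (due north)
//   heading(0.0, 0.0, 10.0, 0.0) -> ~90.0  (due east)
// Note that Math.atan2 yields values in (-180, 180] degrees, so westerly bearings come back
// negative (e.g. due west is -90.0 rather than 270.0) unless the caller normalises them.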
private static Double toRadians(Double value)
{
return value * Math.PI / 180.0;
}
private static Double toDegrees(Double value)
{
return value * 180.0 / Math.PI;
}
}
|
package org.opendaylight.ovsdb.openstack.netvirt.api;
import com.google.common.collect.Maps;
import java.util.Map;
import java.util.Objects;
/**
* Store configuration for each load balancer instance created.
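*
* <p>Illustrative usage (values are hypothetical, not taken from the codebase):
* <pre>{@code
* LoadBalancerConfiguration lbConfig = new LoadBalancerConfiguration("lb1", "10.0.0.5");
* lbConfig.addMember("member-uuid-1", "10.0.0.11", "fa:16:3e:00:00:01",
*         LoadBalancerConfiguration.PROTOCOL_TCP, 80);
* lbConfig.setProviderNetworkType("vxlan");
* boolean ready = lbConfig.isValid(); // true once members and the network type are set
* }</pre>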
*/
public class LoadBalancerConfiguration {
public static final String PROTOCOL_TCP = "TCP";
public static final String PROTOCOL_HTTP = "HTTP";
public static final String PROTOCOL_HTTPS = "HTTPS";
public class LoadBalancerPoolMember {
String ipAddr;
String macAddr;
String protocol;
Integer port;
int index;
public LoadBalancerPoolMember(String ipAddr, String macAddr, String protocol, Integer port) {
this.ipAddr = ipAddr;
this.macAddr = macAddr;
this.protocol = protocol;
this.port = port;
this.index = -1;
}
public LoadBalancerPoolMember(String ipAddr, String macAddr, String protocol, Integer port, int index) {
this.ipAddr = ipAddr;
this.macAddr = macAddr;
this.protocol = protocol;
this.port = port;
this.index = index;
}
public String getIP() {
return ipAddr;
}
public String getMAC() {
return macAddr;
}
public String getProtocol() {
return protocol;
}
public Integer getPort() {
return port;
}
public int getIndex() {
return index;
}
public void setIndex(int index) {
this.index = index;
}
public boolean equals(LoadBalancerPoolMember other) {
if (!Objects.equals(other.ipAddr, ipAddr))
return false;
else if (!Objects.equals(other.macAddr, macAddr))
return false;
else if (!Objects.equals(other.protocol, protocol))
return false;
else if (!Objects.equals(other.port, port))
return false;
//Ignore Index
return true;
}
@Override
public String toString() {
return "LoadBalancerPoolMember [ip=" + ipAddr + ", mac=" + macAddr +
", protocol=" + protocol + ", port=" + port + ", index=" + index + "]";
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1; // do not mix in Object.hashCode(); keep the hash deterministic across instances
result = prime * result + ((ipAddr == null) ? 0 : ipAddr.hashCode());
result = prime * result + ((macAddr == null) ? 0 : macAddr.hashCode());
result = prime * result + ((protocol == null) ? 0 : protocol.hashCode());
result = prime * result + ((port == null) ? 0 : port.hashCode());
result = prime * result + index;
return result;
}
}
private String name;
private String vip;
private String vmac; //Used when a dummy neutron port is created for the VIP
private String providerNetworkType;
private String providerSegmentationId;
private Map <String, LoadBalancerPoolMember> members;
public LoadBalancerConfiguration() {
this.members = Maps.newHashMap();
}
public LoadBalancerConfiguration(String name, String vip) {
this.members = Maps.newHashMap();
this.name = name;
this.vip = vip;
this.vmac = null;
}
public LoadBalancerConfiguration(String name, String vip, String vmac) {
this.members = Maps.newHashMap();
this.name = name;
this.vip = vip;
this.vmac = vmac;
}
public LoadBalancerConfiguration(LoadBalancerConfiguration lbConfig) {
this.members = Maps.newHashMap(lbConfig.getMembers());
this.name = lbConfig.getName();
this.vip = lbConfig.getVip();
this.vmac = lbConfig.getVmac();
this.providerNetworkType = lbConfig.getProviderNetworkType();
this.providerSegmentationId = lbConfig.getProviderSegmentationId();
}
public Map<String, LoadBalancerPoolMember> getMembers() {
return this.members;
}
public Map<String, LoadBalancerPoolMember> addMember(String uuid, LoadBalancerPoolMember member) {
//If index is not set for this object, update it before inserting
if (member.getIndex() == -1)
member.setIndex(members.size());
this.members.put(uuid, member);
return this.members;
}
public Map<String, LoadBalancerPoolMember> addMember(String uuid, String ipAddr, String macAddr, String protocol, Integer port) {
this.members.put(uuid,
new LoadBalancerPoolMember(ipAddr, macAddr, protocol, port, members.size()));
return this.members;
}
public Map<String, LoadBalancerPoolMember> removeMember(String uuid) {
this.members.remove(uuid);
// Update indices of all other members
int index = 0;
for(Map.Entry<String, LoadBalancerPoolMember> entry : this.getMembers().entrySet())
((LoadBalancerPoolMember) entry.getValue()).setIndex(index++);
return this.members;
}
public boolean isValid() {
if (members.size() == 0)
return false;
else if (providerNetworkType == null)
return false;
return true;
}
public void setVip(String vip) {
this.vip = vip;
}
public String getVip() {
return this.vip;
}
public void setVmac(String vmac) {
this.vmac = vmac;
}
public String getVmac() {
return this.vmac;
}
public void setName(String name) {
this.name = name;
}
public String getName() {
return this.name;
}
public void setProviderSegmentationId(String providerSegmentationId) {
this.providerSegmentationId = providerSegmentationId;
}
public String getProviderSegmentationId() {
return this.providerSegmentationId;
}
public void setProviderNetworkType(String providerNetworkType) {
this.providerNetworkType = providerNetworkType;
}
public String getProviderNetworkType() {
return this.providerNetworkType;
}
@Override
public String toString() {
return "LoadBalancerConfiguration [name=" + name +
", vip=" + vip + ", vmac=" + vmac +
", networkType=" + providerNetworkType +
", segmentationId=" + providerSegmentationId +
", members=" + members + "]";
}
}
|
package ua.com.fielden.platform.domaintree.impl;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import ua.com.fielden.platform.domaintree.Function;
import ua.com.fielden.platform.domaintree.FunctionUtils;
import ua.com.fielden.platform.domaintree.ICalculatedProperty.CalculatedPropertyCategory;
import ua.com.fielden.platform.domaintree.IDomainTreeRepresentation;
import ua.com.fielden.platform.domaintree.impl.AbstractDomainTreeManager.ITickRepresentationWithMutability;
import ua.com.fielden.platform.entity.AbstractEntity;
import ua.com.fielden.platform.entity.AbstractUnionEntity;
import ua.com.fielden.platform.entity.annotation.Calculated;
import ua.com.fielden.platform.entity.annotation.CritOnly;
import ua.com.fielden.platform.entity.annotation.Ignore;
import ua.com.fielden.platform.entity.annotation.Invisible;
import ua.com.fielden.platform.entity.annotation.IsProperty;
import ua.com.fielden.platform.entity.annotation.KeyTitle;
import ua.com.fielden.platform.entity.annotation.KeyType;
import ua.com.fielden.platform.reflection.AnnotationReflector;
import ua.com.fielden.platform.reflection.Finder;
import ua.com.fielden.platform.reflection.PropertyTypeDeterminator;
import ua.com.fielden.platform.reflection.asm.impl.DynamicEntityClassLoader;
import ua.com.fielden.platform.reflection.development.EntityDescriptor;
import ua.com.fielden.platform.serialisation.api.ISerialiser;
import ua.com.fielden.platform.serialisation.impl.TgKryo;
import ua.com.fielden.platform.utils.EntityUtils;
import ua.com.fielden.platform.utils.Pair;
/**
* A base domain tree representation for all TG trees. Includes strict TG domain rules that should be used by all specific tree implementations. <br><br>
*
* @author TG Team
*
*/
public abstract class AbstractDomainTreeRepresentation extends AbstractDomainTree implements IDomainTreeRepresentationWithMutability {
private final EnhancementLinkedRootsSet rootTypes;
private final EnhancementSet manuallyExcludedProperties;
private final AbstractTickRepresentation firstTick;
private final AbstractTickRepresentation secondTick;
/** Please do not use this field directly, use {@link #includedPropertiesMutable(Class)} lazy getter instead. */
private final EnhancementRootsMap<ListenedArrayList> includedProperties;
private final transient List<IPropertyListener> propertyListeners, disabledPropertyListeners;
/**
* A <i>representation</i> constructor. Initialises also children references on itself.
*/
protected AbstractDomainTreeRepresentation(final ISerialiser serialiser, final Set<Class<?>> rootTypes, final Set<Pair<Class<?>, String>> excludedProperties, final AbstractTickRepresentation firstTick, final AbstractTickRepresentation secondTick, final EnhancementRootsMap<ListenedArrayList> includedProperties) {
super(serialiser);
this.rootTypes = new EnhancementLinkedRootsSet();
this.rootTypes.addAll(rootTypes);
this.manuallyExcludedProperties = createSet();
this.manuallyExcludedProperties.addAll(excludedProperties);
this.firstTick = firstTick;
this.secondTick = secondTick;
propertyListeners = new ArrayList<IPropertyListener>();
disabledPropertyListeners = new ArrayList<IPropertyListener>();
// initialise the references on this instance in its children
try {
final Field dtrField = Finder.findFieldByName(AbstractTickRepresentation.class, "dtr");
final boolean isAccessible = dtrField.isAccessible();
dtrField.setAccessible(true);
dtrField.set(firstTick, this);
dtrField.set(secondTick, this);
dtrField.setAccessible(isAccessible);
} catch (final Exception e) {
e.printStackTrace();
throw new IllegalStateException(e);
}
// this field unfortunately should be lazy loaded due to heavy-weight nature (deep, circular tree of properties)
this.includedProperties = createRootsMap();
this.includedProperties.putAll(includedProperties);
for (final Entry<Class<?>, ListenedArrayList> entry : this.includedProperties.entrySet()) {
// initialise the references on this instance in "included properties" lists
try {
final Field rootField = Finder.findFieldByName(ListenedArrayList.class, "root");
final boolean isAccessible = rootField.isAccessible();
rootField.setAccessible(true);
rootField.set(entry.getValue(), entry.getKey());
rootField.setAccessible(isAccessible);
} catch (final Exception e) {
e.printStackTrace();
throw new IllegalStateException(e);
}
}
}
/**
* Constructs recursively the list of properties using given list of fields.
*
* @param rootType
* @param path
* @param fieldsAndKeys
* @return
*/
private List<String> constructProperties(final Class<?> managedType, final String path, final List<Field> fieldsAndKeys) {
final List<String> newIncludedProps = new ArrayList<String>();
for (final Field field : fieldsAndKeys) {
final String property = StringUtils.isEmpty(path) ? field.getName() : path + "." + field.getName();
final String reflectionProperty = reflectionProperty(property);
if (!isExcludedImmutably(managedType, reflectionProperty)) {
newIncludedProps.add(property);
// determine the type of property, which can be a) "union entity" property b) property under "union entity" c) collection property d) entity property e) simple property
final Pair<Class<?>, String> penultAndLast = PropertyTypeDeterminator.transform(managedType, reflectionProperty);
final Class<?> parentType = penultAndLast.getKey();
final Class<?> propertyType = PropertyTypeDeterminator.determineClass(parentType, penultAndLast.getValue(), true, true);
// add the children for "property" based on its nature
if (EntityUtils.isEntityType(propertyType)) {
final boolean propertyTypeWasInHierarchyBefore = typesInHierarchy(managedType, reflectionProperty, true).contains(DynamicEntityClassLoader.getOriginalType(propertyType));
// final boolean isKeyPart = Finder.getKeyMembers(parentType).contains(field); // indicates if field is the part of the key.
final boolean isEntityItself = "".equals(property); // empty property means "entity itself"
final Pair<Class<?>, String> transformed = PropertyTypeDeterminator.transform(managedType, property);
final String penultPropertyName = PropertyTypeDeterminator.isDotNotation(property) ? PropertyTypeDeterminator.penultAndLast(property).getKey() : null;
final String lastPropertyName = transformed.getValue();
final boolean isLinkProperty = !isEntityItself && PropertyTypeDeterminator.isDotNotation(property) && Finder.isOne2Many_or_One2One_association(managedType, penultPropertyName) && lastPropertyName.equals(Finder.findLinkProperty((Class<? extends AbstractEntity<?>>) managedType, penultPropertyName)); // exclude link properties in one2many and one2one associations
if (propertyTypeWasInHierarchyBefore && !isLinkProperty/*!isKeyPart*/) {
newIncludedProps.add(createDummyMarker(property));
} else if (EntityUtils.isUnionEntityType(propertyType)) { // "union entity" property
final Pair<List<Field>, List<Field>> commonAndUnion = commonAndUnion((Class<? extends AbstractUnionEntity>) propertyType);
// a new tree branch should be created for "common" properties under "property"
final String commonBranch = createCommonBranch(property);
newIncludedProps.add(commonBranch); // final DefaultMutableTreeNode nodeForCommonProperties = addHotNode("common", null, false, klassNode, new Pair<String, String>("Common", TitlesDescsGetter.italic("<b>Common properties</b>")));
newIncludedProps.addAll(constructProperties(managedType, commonBranch, commonAndUnion.getKey()));
// "union" properties should be added directly to "property"
newIncludedProps.addAll(constructProperties(managedType, property, commonAndUnion.getValue()));
} else if (EntityUtils.isUnionEntityType(parentType)) { // property under "union entity"
// the property under "union entity" should have only "non-common" properties added
final List<Field> propertiesWithoutCommon = constructKeysAndProperties(propertyType);
final List<String> parentCommonNames = AbstractUnionEntity.commonProperties((Class<? extends AbstractUnionEntity>) parentType);
propertiesWithoutCommon.removeAll(constructKeysAndProperties(propertyType, parentCommonNames));
newIncludedProps.addAll(constructProperties(managedType, property, propertiesWithoutCommon));
} else { // collectional or non-collectional entity property
newIncludedProps.addAll(constructProperties(managedType, property, constructKeysAndProperties(propertyType)));
}
}
}
}
return newIncludedProps;
}
@Override
public Set<Pair<Class<?>, String>> excludedPropertiesMutable() {
return manuallyExcludedProperties;
}
/**
* Determines the lists of common and union fields for concrete union entity type.
*
* @param unionClass
* @return
*/
private static Pair<List<Field>, List<Field>> commonAndUnion(final Class<? extends AbstractUnionEntity> unionClass) {
final List<Field> unionProperties = AbstractUnionEntity.unionProperties(unionClass);
final Class<? extends AbstractEntity> concreteUnionClass = (Class<? extends AbstractEntity>) unionProperties.get(0).getType();
final List<String> commonNames = AbstractUnionEntity.commonProperties(unionClass);
final List<Field> commonProperties = constructKeysAndProperties(concreteUnionClass, commonNames);
return new Pair<List<Field>, List<Field>>(commonProperties, unionProperties);
}
/**
* Forms a list of fields for "type" in order ["key" or key members => "desc" (if exists) => other properties in order as declared in domain].
*
* @param type
* @return
*/
private static List<Field> constructKeysAndProperties(final Class<?> type) {
final List<Field> properties = Finder.findProperties(type);
properties.remove(Finder.getFieldByName(type, AbstractEntity.KEY));
properties.remove(Finder.getFieldByName(type, AbstractEntity.DESC));
final List<Field> keys = Finder.getKeyMembers(type);
properties.removeAll(keys);
final List<Field> fieldsAndKeys = new ArrayList<Field>();
fieldsAndKeys.addAll(keys);
fieldsAndKeys.add(Finder.getFieldByName(type, AbstractEntity.DESC));
fieldsAndKeys.addAll(properties);
return fieldsAndKeys;
}
/**
* Forms a list of fields for "type" in order ["key" or key members => "desc" (if exists) => other properties in order as declared in domain] and chooses only fields with <code>names</code>.
*
* @param type
* @param names
* @return
*/
private static List<Field> constructKeysAndProperties(final Class<?> type, final List<String> names) {
final List<Field> allProperties = constructKeysAndProperties(type);
final List<Field> properties = new ArrayList<Field>();
for (final Field f : allProperties) {
if (names.contains(f.getName())) {
properties.add(f);
}
}
return properties;
}
/**
* Returns <code>true</code> if property is collection itself.
*
* @param root
* @param property
* @return
*/
protected static boolean isCollection(final Class<?> root, final String property) {
final boolean isEntityItself = "".equals(property); // empty property means "entity itself"
if (isEntityItself) {
return false;
}
final Pair<Class<?>, String> penultAndLast = PropertyTypeDeterminator.transform(root, property);
final Class<?> realType = isEntityItself ? null : PropertyTypeDeterminator.determineClass(penultAndLast.getKey(), penultAndLast.getValue(), true, false);
return !isEntityItself && realType != null && Collection.class.isAssignableFrom(realType); // or collections itself
}
/**
* Returns parent collection for specified property.
*
* @param root
* @param property
* @return
*/
public static String parentCollection(final Class<?> root, final String property) {
if (!isCollectionOrInCollectionHierarchy(root, property)) {
throw new IllegalArgumentException("The property [" + property + "] is not in collection hierarchy.");
}
return isCollection(root, property) ? property : parentCollection(root, PropertyTypeDeterminator.penultAndLast(property).getKey());
}
/**
* Returns <code>true</code> if property is in collectional hierarchy.
*
* @param root
* @param property
* @return
*/
public static boolean isInCollectionHierarchy(final Class<?> root, final String property) {
final boolean isEntityItself = "".equals(property); // empty property means "entity itself"
return !isEntityItself && typesInHierarchy(root, property, false).contains(Collection.class); // properties in collectional hierarchy
}
/**
* Returns <code>true</code> if property is in collectional hierarchy or is collection itself.
*
* @param root
* @param property
* @return
*/
public static boolean isCollectionOrInCollectionHierarchy(final Class<?> root, final String property) {
return isCollection(root, property) || isInCollectionHierarchy(root, property);
}
@Override
public boolean isExcludedImmutably(final Class<?> root, final String property) {
final boolean isEntityItself = "".equals(property); // empty property means "entity itself"
final Pair<Class<?>, String> transformed = PropertyTypeDeterminator.transform(root, property);
final String penultPropertyName = PropertyTypeDeterminator.isDotNotation(property) ? PropertyTypeDeterminator.penultAndLast(property).getKey() : null;
final Class<?> penultType = transformed.getKey();
final String lastPropertyName = transformed.getValue();
final Class<?> propertyType = isEntityItself ? root : PropertyTypeDeterminator.determineClass(penultType, lastPropertyName, true, true);
final Class<?> notEnhancedRoot = DynamicEntityClassLoader.getOriginalType(root);
final Field field = isEntityItself ? null : Finder.getFieldByName(penultType, lastPropertyName);
return manuallyExcludedProperties.contains(key(root, property)) || // exclude manually excluded properties
!isEntityItself && AbstractEntity.KEY.equals(lastPropertyName) && propertyType == null || // exclude "key" -- no KeyType annotation exists in direct owner of "key"
!isEntityItself && AbstractEntity.KEY.equals(lastPropertyName) && !AnnotationReflector.isAnnotationPresent(KeyTitle.class, penultType) || // exclude "key" -- no KeyTitle annotation exists in direct owner of "key"
!isEntityItself && AbstractEntity.KEY.equals(lastPropertyName) && !EntityUtils.isEntityType(propertyType) || // exclude "key" -- "key" is not of entity type
!isEntityItself && AbstractEntity.DESC.equals(lastPropertyName) && !EntityDescriptor.hasDesc(penultType) || // exclude "desc" -- no DescTitle annotation exists in direct owner of "desc"
!isEntityItself && !Finder.findFieldByName(root, property).isAnnotationPresent(IsProperty.class) || // exclude non-TG properties (not annotated by @IsProperty)
isEntityItself && !rootTypes().contains(propertyType) || // exclude entities of non-"root types"
EntityUtils.isEnum(propertyType) || // exclude enumeration properties / entities
EntityUtils.isEntityType(propertyType) && Modifier.isAbstract(propertyType.getModifiers()) || // exclude properties / entities of entity type with 'abstract' modifier
EntityUtils.isEntityType(propertyType) && !AnnotationReflector.isAnnotationPresent(KeyType.class, propertyType) || // exclude properties / entities of entity type without KeyType annotation
!isEntityItself && AnnotationReflector.isPropertyAnnotationPresent(Invisible.class, penultType, lastPropertyName) || // exclude invisible properties
!isEntityItself && AnnotationReflector.isPropertyAnnotationPresent(Ignore.class, penultType, lastPropertyName) || // exclude invisible properties
// !isEntityItself && Finder.getKeyMembers(penultType).contains(field) && typesInHierarchy(root, property, true).contains(DynamicEntityClassLoader.getOriginalType(propertyType)) || // exclude key parts which type was in hierarchy
!isEntityItself && PropertyTypeDeterminator.isDotNotation(property) && Finder.isOne2Many_or_One2One_association(notEnhancedRoot, penultPropertyName) && lastPropertyName.equals(Finder.findLinkProperty((Class<? extends AbstractEntity<?>>) notEnhancedRoot, penultPropertyName)) || // exclude link properties in one2many and one2one associations
!isEntityItself && PropertyTypeDeterminator.isDotNotation(property) && AnnotationReflector.isAnnotationPresentInHierarchy(CritOnly.class, root, penultPropertyName) || // exclude property if it is a child of other AE crit-only property (collection)
!isEntityItself && isExcludedImmutably(root, PropertyTypeDeterminator.isDotNotation(property) ? penultPropertyName : ""); // exclude property if it is an ascender (any level) of already excluded property
}
/**
* Finds a complete set of <b>NOT ENHANCED</b> types in hierarchy of dot-notation expression, excluding the type of last property and including the type of root class.<br><br>
*
* E.g. : "WorkOrder$$1.vehicle.fuelUsages.vehicle.fuelCards.initDate" => <br>
* => [WorkOrder.class, Vehicle.class, FuelUsage.class, FuelCard.class] (if addCollectionalElementType = true) or <br>
* => [WorkOrder.class, Vehicle.class, Collection.class] (if addCollectionalElementType = false)
*
* @param root
* @param property
* @param addCollectionalElementType -- if true, the element type of a collectional property is added to the set; otherwise {@link Collection} is added.
* @return
*/
protected static Set<Class<?>> typesInHierarchy(final Class<?> root, final String property, final boolean addCollectionalElementType) {
if (!PropertyTypeDeterminator.isDotNotation(property)) {
return new HashSet<Class<?>>() {
private static final long serialVersionUID = 6314144790005942324L;
{
add(DynamicEntityClassLoader.getOriginalType(root));
}
};
} else {
final Pair<String, String> penultAndLast = PropertyTypeDeterminator.penultAndLast(property);
final String penult = penultAndLast.getKey();
final Pair<Class<?>, String> transformed = PropertyTypeDeterminator.transform(root, penult);
return new HashSet<Class<?>>() {
private static final long serialVersionUID = 6314144760005942324L;
{
if (addCollectionalElementType) {
add(DynamicEntityClassLoader.getOriginalType(PropertyTypeDeterminator.determineClass(transformed.getKey(), transformed.getValue(), true, true)));
} else {
final Class<?> type = PropertyTypeDeterminator.determineClass(transformed.getKey(), transformed.getValue(), true, false);
add(DynamicEntityClassLoader.getOriginalType(Collection.class.isAssignableFrom(type) ? Collection.class : type));
}
addAll(typesInHierarchy(root, PropertyTypeDeterminator.penultAndLast(property).getKey(), addCollectionalElementType)); // recursively add other types
}
};
}
}
@Override
public void excludeImmutably(final Class<?> root, final String property) {
manuallyExcludedProperties.add(key(root, property));
if (includedProperties.get(root) != null) { // only update the cache if it has already been loaded
includedPropertiesMutable(root).remove(property);
}
}
/**
* An {@link ArrayList} specific implementation which listens to structure modifications (add / remove elements) and fires appropriate events.
*
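* <p>Sketch of the intended behaviour (the root type and property name are hypothetical):
* <pre>{@code
* ListenedArrayList props = new ListenedArrayList(Vehicle.class, dtr);
* props.add("status");    // notifies every registered IPropertyListener with added == true
* props.remove("status"); // notifies the same listeners with added == false
* }</pre>
*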
* @author TG Team
*
*/
public static class ListenedArrayList extends ArrayList<String> {
private static final long serialVersionUID = -4295706377290507263L;
private final transient Class<?> root;
private final transient AbstractDomainTreeRepresentation parentDtr;
public ListenedArrayList() {
this(null, null);
}
public ListenedArrayList(final Class<?> root, final AbstractDomainTreeRepresentation parentDtr) {
super();
this.root = root;
this.parentDtr = parentDtr;
}
private void fireProperty(final Class<?> root, final String property, final boolean added) {
if (parentDtr != null) {
for (final IPropertyListener listener : parentDtr.propertyListeners) {
listener.propertyStateChanged(root, property, added, null);
}
}
}
@Override
public boolean add(final String property) {
final boolean added = super.add(property);
if (added) {
fireProperty(root, property, true);
}
return added;
}
@Override
public void add(final int index, final String property) {
super.add(index, property);
fireProperty(root, property, true);
}
@Override
public boolean remove(final Object obj) {
final boolean removed = super.remove(obj);
if (removed) {
final String property = (String) obj;
fireProperty(root, property, false);
}
return removed;
}
@Override
public boolean addAll(final Collection<? extends String> properties) {
final boolean added = super.addAll(properties);
if (added) {
for (final String property : properties) {
fireProperty(root, property, true);
}
}
return added;
}
@Override
public boolean addAll(final int index, final Collection<? extends String> properties) {
final boolean added = super.addAll(index, properties);
if (added) {
for (final String property : properties) {
fireProperty(root, property, true);
}
}
return added;
}
}
/**
* Getter of mutable "included properties" cache for internal purposes.
* <p>
* Please note that you can only mutate this list with methods {@link List#add(Object)} and {@link List#remove(Object)}
* to correctly reflect the changes on depending objects. (e.g. UI tree models, checked properties etc.)
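* <p>Illustrative only (the root type and property are hypothetical):
* <pre>{@code
* List<String> included = dtr.includedPropertiesMutable(Vehicle.class);
* included.add("fuelUsages");    // fires property listeners so dependent UI models stay in sync
* included.remove("fuelUsages");
* }</pre>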
*
* @param root
* @return
*/
@Override
public List<String> includedPropertiesMutable(final Class<?> managedType) {
final Class<?> root = DynamicEntityClassLoader.getOriginalType(managedType);
if (includedProperties.get(root) == null) { // not yet loaded
final Date st = new Date();
enableListening(false);
// initialise included properties using isExcluded contract and manually excluded properties
final ListenedArrayList includedProps = new ListenedArrayList(root, this);
if (!isExcludedImmutably(root, "")) { // the entity itself is included -- add it to "included properties" list
includedProps.add("");
if (!EntityUtils.isEntityType(root)) {
throw new IllegalArgumentException("Can not add children properties to non-entity type [" + root.getSimpleName() + "] in path [" + root.getSimpleName() + "=>" + "" + "].");
}
includedProps.addAll(constructProperties(managedType, "", constructKeysAndProperties(managedType)));
}
enableListening(true);
includedProperties.put(root, includedProps);
logger().info("Root [" + root.getSimpleName() + "] has been processed within " + (new Date().getTime() - st.getTime()) + "ms with " + includedProps.size() + " included properties."); // => [" + includedProps + "]
}
return includedProperties.get(root);
}
/**
* Enables or disables listening for each {@link ListenedArrayList} structures.
*
* @param enable
*/
private void enableListening(final boolean enable) {
if (enable) {
propertyListeners.addAll(disabledPropertyListeners);
disabledPropertyListeners.clear();
} else {
disabledPropertyListeners.addAll(propertyListeners);
propertyListeners.clear();
}
}
@Override
public boolean addPropertyListener(final IPropertyListener listener) {
return propertyListeners.add(listener);
}
@Override
public boolean removePropertyListener(final IPropertyListener listener) {
return propertyListeners.remove(listener);
}
@Override
public List<String> includedProperties(final Class<?> root) {
return Collections.unmodifiableList(includedPropertiesMutable(root));
}
/**
* This method loads all missing properties on the tree path as defined in <code>fromPath</code> and <code>toPath</code> for type <code>root</code>.
* Please note that property <code>fromPath</code> should be loaded itself (perhaps without its children).
*
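* <p>For example (the property path is hypothetical), warming up from "" to "replacedBy.status"
* expands the dummy marker under "" if one exists, then recurses into "replacedBy" and finally
* "replacedBy.status", returning <code>true</code> if any circular branch had to be expanded.
*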
* @param managedType
* @param fromPath
* @param toPath
*/
protected final boolean warmUp(final Class<?> managedType, final String fromPath, final String toPath) {
// System.out.println("Warm up => from = " + fromPath + "; to = " + toPath);
if (includedPropertiesMutable(managedType).contains(fromPath)) { // the property itself exists in "included properties" cache
final String dummyMarker = createDummyMarker(fromPath);
final boolean shouldBeLoaded = includedPropertiesMutable(managedType).contains(dummyMarker);
if (shouldBeLoaded) { // the property is circular and has no children loaded -- it has to be done now
final int index = includedPropertiesMutable(managedType).indexOf(dummyMarker);
includedPropertiesMutable(managedType).remove(dummyMarker); // remove dummy property
includedPropertiesMutable(managedType).addAll(index, constructProperties(managedType, fromPath, constructKeysAndProperties(PropertyTypeDeterminator.determinePropertyType(managedType, fromPath))));
}
if (!EntityUtils.equalsEx(fromPath, toPath)) { // not the leaf is trying to be warmed up
final String part = "".equals(fromPath) ? toPath : toPath.replaceFirst(fromPath + ".", "");
final String part2 = part.indexOf(".") > 0 ? part.substring(0, part.indexOf(".")) : part;
final String part3 = "".equals(fromPath) ? part2 : fromPath + "." + part2;
final boolean hasBeenWarmedUp = warmUp(managedType, part3, toPath);
return shouldBeLoaded || hasBeenWarmedUp;
} else {
return shouldBeLoaded;
}
} else {
throw new IllegalArgumentException("The property [" + fromPath + "] in root [" + managedType.getSimpleName() + "] should be already loaded into 'included properties'.");
}
}
@Override
public void warmUp(final Class<?> managedType, final String property) {
final Date st = new Date();
illegalExcludedProperties(this, managedType, reflectionProperty(property), "Could not 'warm up' an 'excluded' property [" + property + "] in type [" + managedType.getSimpleName() + "]. Only properties that are not excluded can be 'warmed up'.");
includedPropertiesMutable(managedType); // ensure "included properties" to be loaded
if (warmUp(managedType, "", property)) {
logger().info("Warmed up root's [" + managedType.getSimpleName() + "] property [" + property + "] within " + (new Date().getTime() - st.getTime()) + "ms.");
}
}
protected static void illegalExcludedProperties(final IDomainTreeRepresentation dtr, final Class<?> root, final String property, final String message) {
if (dtr.isExcludedImmutably(root, property)) {
throw new IllegalArgumentException(message);
}
}
/**
* An abstract tick representation. <br><br>
*
* Includes default implementations of "disabling/immutable checking", that contain: <br>
* a) manual state management; <br>
* b) resolution of conflicts with excluded properties; <br>
* c) automatic disabling of "immutably checked" properties.
*
* @author TG Team
*
*/
public static abstract class AbstractTickRepresentation implements ITickRepresentationWithMutability {
private final EnhancementSet disabledManuallyProperties;
private final transient AbstractDomainTreeRepresentation dtr;
private final transient List<IPropertyDisablementListener> propertyDisablementListeners;
/**
* Used for serialisation and for normal initialisation. IMPORTANT : To use this tick it should be passed into representation constructor and then into manager constructor, which should initialise "dtr" and "tickManager" fields.
*/
protected AbstractTickRepresentation() {
this.disabledManuallyProperties = createSet();
this.propertyDisablementListeners = new ArrayList<IPropertyDisablementListener>();
this.dtr = null; // IMPORTANT : to use this tick it should be passed into representation constructor, which should initialise "dtr" field.
}
@Override
public boolean isDisabledImmutably(final Class<?> root, final String property) {
illegalExcludedProperties(dtr, root, property, "Could not ask a 'disabled' state for already 'excluded' property [" + property + "] in type [" + root.getSimpleName() + "].");
return disabledManuallyProperties.contains(key(root, property)) || // disable manually disabled properties
isCheckedImmutably(root, property); // the checked by default properties should be disabled (immutable checking)
}
@Override
public void disableImmutably(final Class<?> root, final String property) {
illegalExcludedProperties(dtr, root, property, "Could not disable already 'excluded' property [" + property + "] in type [" + root.getSimpleName() + "].");
disabledManuallyProperties.add(key(root, property));
fireDisablingEvent(root, property);
}
@Override
public boolean addPropertyDisablementListener(final IPropertyDisablementListener listener) {
return propertyDisablementListeners.add(listener);
}
@Override
public boolean removePropertyDisablementListener(final IPropertyDisablementListener listener) {
return propertyDisablementListeners.remove(listener);
}
/**
* Fires disablement event for specified property (the property has been disabled successfully).
*
* @param root
* @param property
*/
private void fireDisablingEvent(final Class<?> root, final String property) {
// fire DISABLED event after successful "disabled" action
for (final IPropertyDisablementListener listener : propertyDisablementListeners) {
listener.propertyStateChanged(root, property, true, null);
}
}
@Override
public boolean isCheckedImmutably(final Class<?> root, final String property) {
illegalExcludedProperties(dtr, root, property, "Could not ask a 'checked' state for already 'excluded' property [" + property + "] in type [" + root.getSimpleName() + "].");
return false;
}
public AbstractDomainTreeRepresentation getDtr() {
return dtr;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (disabledManuallyProperties == null ? 0 : disabledManuallyProperties.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final AbstractTickRepresentation other = (AbstractTickRepresentation) obj;
if (disabledManuallyProperties == null) {
if (other.disabledManuallyProperties != null) {
return false;
}
} else if (!disabledManuallyProperties.equals(other.disabledManuallyProperties)) {
return false;
}
return true;
}
@Override
public EnhancementSet disabledManuallyPropertiesMutable() {
return disabledManuallyProperties;
}
}
@Override
public ITickRepresentation getFirstTick() {
return firstTick;
}
@Override
public ITickRepresentation getSecondTick() {
return secondTick;
}
@Override
public Set<Class<?>> rootTypes() {
return rootTypes;
}
@Override
public Set<Function> availableFunctions(final Class<?> root, final String property) {
illegalExcludedProperties(this, root, property, "Could not ask for 'available functions' for already 'excluded' property [" + property + "] in type [" + root.getSimpleName() + "].");
final boolean isEntityItself = "".equals(property); // empty property means "entity itself"
final Class<?> propertyType = isEntityItself ? root : PropertyTypeDeterminator.determinePropertyType(root, property);
final Set<Function> availableFunctions = FunctionUtils.functionsFor(propertyType);
if (!isEntityItself && isCalculatedAndOfTypes(root, property, CalculatedPropertyCategory.AGGREGATED_EXPRESSION, CalculatedPropertyCategory.ATTRIBUTED_COLLECTIONAL_EXPRESSION)) {
final Set<Function> functions = new HashSet<Function>();
if (availableFunctions.contains(Function.SELF)) {
functions.add(Function.SELF);
}
return functions;
}
if (isEntityItself) {
availableFunctions.remove(Function.SELF);
}
if (!isInCollectionHierarchy(root, property)) {
availableFunctions.remove(Function.ALL);
availableFunctions.remove(Function.ANY);
}
if (!isEntityItself && Integer.class.isAssignableFrom(propertyType) && !isCalculatedAndOriginatedFromNotIntegerType(root, property)) {
availableFunctions.remove(Function.COUNT_DISTINCT);
}
return availableFunctions;
}
/**
* Returns <code>true</code> if the property is calculated.
*
* @param root
* @param property
* @return
*/
public static boolean isCalculated(final Class<?> root, final String property) {
return AnnotationReflector.getPropertyAnnotation(Calculated.class, root, property) != null;
}
/**
* Returns <code>true</code> if the property is calculated with one of the specified categories.
*
* @param root
* @param property
* @param types
* @return
*/
protected static boolean isCalculatedAndOfTypes(final Class<?> root, final String property, final CalculatedPropertyCategory ... types) {
final Calculated ca = AnnotationReflector.getPropertyAnnotation(Calculated.class, root, property);
if (ca != null) {
for (final CalculatedPropertyCategory type : types) {
if (type.equals(ca.category())) {
return true;
}
}
}
return false;
}
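/**
* Returns <code>true</code> if the property is calculated and its origination property is of date type.
*
* @param root
* @param property
* @return
*/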
protected static boolean isCalculatedAndOriginatedFromDateType(final Class<?> root, final String property) {
final Calculated calculatedAnnotation = AnnotationReflector.getPropertyAnnotation(Calculated.class, root, property);
return calculatedAnnotation != null && EntityUtils.isDate(PropertyTypeDeterminator.determinePropertyType(root, calculatedAnnotation.origination()));
}
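/**
* Returns <code>true</code> if the property is calculated and its origination property is not of Integer type.
*
* @param root
* @param property
* @return
*/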
private static boolean isCalculatedAndOriginatedFromNotIntegerType(final Class<?> root, final String property) {
final Calculated calculatedAnnotation = AnnotationReflector.getPropertyAnnotation(Calculated.class, root, property);
return calculatedAnnotation != null && !Integer.class.isAssignableFrom(PropertyTypeDeterminator.determinePropertyType(root, calculatedAnnotation.origination()));
}
/**
* A specific Kryo serialiser for {@link AbstractDomainTreeRepresentation} and its descendants.
*
* @author TG Team
*
*/
protected abstract static class AbstractDomainTreeRepresentationSerialiser<T extends AbstractDomainTreeRepresentation> extends AbstractDomainTreeSerialiser<T> {
public AbstractDomainTreeRepresentationSerialiser(final TgKryo kryo) {
super(kryo);
}
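/**
* Writes the state of the representation (root types, manually excluded properties, both ticks and included
* properties). This write order is expected to be mirrored by the corresponding read logic of the concrete serialiser.
*/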
@Override
public void write(final ByteBuffer buffer, final T representation) {
writeValue(buffer, representation.getRootTypes());
writeValue(buffer, representation.getManuallyExcludedProperties());
writeValue(buffer, representation.getFirstTick());
writeValue(buffer, representation.getSecondTick());
writeValue(buffer, representation.getIncludedProperties());
}
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + (manuallyExcludedProperties == null ? 0 : manuallyExcludedProperties.hashCode());
result = prime * result + (firstTick == null ? 0 : firstTick.hashCode());
result = prime * result + (includedProperties == null ? 0 : includedProperties.hashCode());
result = prime * result + (rootTypes == null ? 0 : rootTypes.hashCode());
result = prime * result + (secondTick == null ? 0 : secondTick.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final AbstractDomainTreeRepresentation other = (AbstractDomainTreeRepresentation) obj;
if (manuallyExcludedProperties == null) {
if (other.manuallyExcludedProperties != null) {
return false;
}
} else if (!manuallyExcludedProperties.equals(other.manuallyExcludedProperties)) {
return false;
}
if (firstTick == null) {
if (other.firstTick != null) {
return false;
}
} else if (!firstTick.equals(other.firstTick)) {
return false;
}
if (includedProperties == null) {
if (other.includedProperties != null) {
return false;
}
} else if (!includedProperties.equals(other.includedProperties)) {
return false;
}
if (rootTypes == null) {
if (other.rootTypes != null) {
return false;
}
} else if (!rootTypes.equals(other.rootTypes)) {
return false;
}
if (secondTick == null) {
if (other.secondTick != null) {
return false;
}
} else if (!secondTick.equals(other.secondTick)) {
return false;
}
return true;
}
/** Please do not use this getter directly; use the lazy getter {@link #includedPropertiesMutable(Class)} instead. */
protected EnhancementRootsMap<ListenedArrayList> includedProperties() {
return includedProperties;
}
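/**
* Returns the set of root entity types of this representation (also used by the serialiser).
*/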
public EnhancementLinkedRootsSet getRootTypes() {
return rootTypes;
}
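/**
* Returns the set of manually excluded properties (also used by the serialiser).
*/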
public EnhancementSet getManuallyExcludedProperties() {
return manuallyExcludedProperties;
}
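/**
* Returns the map of included properties keyed by root type (also used by the serialiser).
*/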
public EnhancementRootsMap<ListenedArrayList> getIncludedProperties() {
return includedProperties;
}
}