answer
stringlengths 17
10.2M
|
|---|
package protocolsupport.server.listeners;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerInteractEntityEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerToggleSneakEvent;
import org.bukkit.event.vehicle.VehicleEnterEvent;
import protocolsupport.ProtocolSupport;
import protocolsupport.api.ProtocolSupportAPI;
import protocolsupport.api.ProtocolVersion;
import protocolsupport.api.chat.components.BaseComponent;
import protocolsupport.api.tab.TabAPI;
/**
 * Bukkit event listener that smooths over behavioral differences for legacy
 * (1.5.2 and older) clients and forwards the default tab header/footer on join.
 */
public class PlayerListener implements Listener {

	private final ProtocolSupport plugin;

	public PlayerListener(ProtocolSupport plugin) {
		this.plugin = plugin;
	}

	/** True when the player's client protocol is 1.5.2 or older. */
	private static boolean isLegacyClient(Player player) {
		return ProtocolSupportAPI.getProtocolVersion(player).isBeforeOrEq(ProtocolVersion.MINECRAFT_1_5_2);
	}

	// Legacy clients toggle sneak to dismount; eject them from the vehicle.
	@EventHandler
	public void onShift(PlayerToggleSneakEvent event) {
		Player sneaker = event.getPlayer();
		if (!sneaker.isInsideVehicle()) {
			return;
		}
		if (isLegacyClient(sneaker)) {
			sneaker.leaveVehicle();
		}
	}

	// Legacy clients right-click their own vehicle to dismount.
	@EventHandler(ignoreCancelled = true)
	public void onVehicleInteract(PlayerInteractEntityEvent event) {
		Player rider = event.getPlayer();
		if (rider.isInsideVehicle() && isLegacyClient(rider) && rider.getVehicle().equals(event.getRightClicked())) {
			rider.leaveVehicle();
		}
	}

	// Prevent entering a vehicle that already carries a passenger.
	@EventHandler(ignoreCancelled = true)
	public void onVehicleEnter(VehicleEnterEvent event) {
		boolean occupied = event.getVehicle().getPassenger() != null;
		if (occupied) {
			event.setCancelled(true);
		}
	}

	// One tick after join, push the configured default tab header/footer.
	@EventHandler
	public void onJoin(final PlayerJoinEvent event) {
		Bukkit.getScheduler().runTaskLater(plugin, new Runnable() {
			@Override
			public void run() {
				BaseComponent header = TabAPI.getDefaultHeader();
				BaseComponent footer = TabAPI.getDefaultFooter();
				if ((header == null) && (footer == null)) {
					return;
				}
				TabAPI.sendHeaderFooter(event.getPlayer(), header, footer);
			}
		}, 1);
	}
}
|
// This file is part of the OpenNMS(R) Application.
// OpenNMS(R) is a derivative work, containing both original code, included code and modified
// and included code are below.
// OpenNMS(R) is a registered trademark of Blast Internet Services, Inc.
// Modifications:
// Aug 28, 2004: Created this file.
// This program is free software; you can redistribute it and/or modify
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
// For more information contact:
// Tab Size = 8
package org.opennms.netmgt.vacuumd.jmx;
/**
 * Implements the VacuumdMBean interface and delegates the mbean implementation
 * to the singleton Vacuumd.
 */
public class Vacuumd implements VacuumdMBean {

    /** The daemon singleton that every mbean operation is forwarded to. */
    private static org.opennms.netmgt.vacuumd.Vacuumd daemon() {
        return org.opennms.netmgt.vacuumd.Vacuumd.getSingleton();
    }

    /** Human-readable name of the daemon's current fiber status. */
    private static String statusName(int status) {
        return org.opennms.core.fiber.Fiber.STATUS_NAMES[status];
    }

    /* (non-Javadoc)
     * @see org.opennms.netmgt.vacuumd.jmx.VacuumdMBean#init()
     */
    public void init() {
        daemon().init();
    }

    /* (non-Javadoc)
     * @see org.opennms.netmgt.vacuumd.jmx.VacuumdMBean#start()
     */
    public void start() {
        daemon().start();
    }

    /* (non-Javadoc)
     * @see org.opennms.netmgt.vacuumd.jmx.VacuumdMBean#stop()
     */
    public void stop() {
        daemon().stop();
    }

    /* (non-Javadoc)
     * @see org.opennms.netmgt.vacuumd.jmx.VacuumdMBean#getStatus()
     */
    public int getStatus() {
        return daemon().getStatus();
    }

    /* (non-Javadoc)
     * @see org.opennms.netmgt.vacuumd.jmx.VacuumdMBean#status()
     */
    public String status() {
        return statusName(getStatus());
    }

    /* (non-Javadoc)
     * @see org.opennms.netmgt.vacuumd.jmx.VacuumdMBean#getStatusText()
     */
    public String getStatusText() {
        return statusName(getStatus());
    }
}
|
package com.akiban.server.test.pt;
import com.akiban.server.test.ApiTestBase;
import com.akiban.ais.model.TableIndex;
import com.akiban.qp.expression.IndexBound;
import com.akiban.qp.expression.IndexKeyRange;
import com.akiban.qp.expression.RowBasedUnboundExpressions;
import com.akiban.qp.operator.API;
import com.akiban.qp.operator.Cursor;
import com.akiban.qp.operator.Operator;
import com.akiban.qp.operator.OperatorExecutionBase;
import com.akiban.qp.operator.QueryContext;
import com.akiban.qp.persistitadapter.PersistitAdapter;
import com.akiban.qp.row.Row;
import com.akiban.qp.row.ValuesHolderRow;
import com.akiban.qp.row.ValuesRow;
import com.akiban.qp.rowtype.DerivedTypesSchema;
import com.akiban.qp.rowtype.IndexRowType;
import com.akiban.qp.rowtype.RowType;
import com.akiban.qp.rowtype.Schema;
import com.akiban.qp.rowtype.ValuesRowType;
import com.akiban.server.api.dml.SetColumnSelector;
import com.akiban.server.error.QueryCanceledException;
import com.akiban.server.expression.Expression;
import com.akiban.server.expression.ExpressionComposer;
import com.akiban.server.expression.std.BoundFieldExpression;
import com.akiban.server.expression.std.Expressions;
import com.akiban.server.expression.std.FieldExpression;
import com.akiban.server.service.functions.FunctionsRegistry;
import com.akiban.server.service.functions.FunctionsRegistryImpl;
import com.akiban.server.service.session.Session;
import com.akiban.server.types.AkType;
import com.akiban.server.types.ValueSource;
import com.akiban.server.types.util.ValueHolder;
import com.akiban.util.ShareHolder;
import com.akiban.util.Shareable;
import com.persistit.Exchange;
import com.persistit.Key;
import com.persistit.exception.PersistitException;
import org.junit.Before;
import org.junit.Test;
import java.util.*;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReferenceArray;
public class AggregatePT extends ApiTestBase {
public static final int ROW_COUNT = 100000;
public static final int WARMUPS = 100, REPEATS = 10;
/** Runs this performance test under the "PT" suite name. */
public AggregatePT() {
    super("PT");
}
private TableIndex index;
/**
 * Creates the test table and a covering index, then loads {@code ROW_COUNT}
 * pseudo-random rows. The Random seed is fixed so every run sees identical data.
 */
@Before
public void loadData() {
    int t = createTable("user", "t",
                        "id INT NOT NULL PRIMARY KEY",
                        "gid INT",
                        "flag BOOLEAN",
                        "sval VARCHAR(20) NOT NULL",
                        "n1 INT",
                        "n2 INT",
                        "k INT");
    Random rand = new Random(69); // fixed seed: deterministic data set
    for (int i = 0; i < ROW_COUNT; i++) {
        writeRow(t, i,
                 rand.nextInt(10),                 // gid: 10 groups
                 (rand.nextInt(100) < 80) ? 0 : 1, // flag: ~80% zeros
                 randString(rand, 20),             // sval
                 rand.nextInt(100),                // n1
                 rand.nextInt(1000),               // n2
                 rand.nextInt());                  // k
    }
    // Covering index over every column the test plans read, so scans are index-only.
    index = createIndex("user", "t", "t_i",
                        "gid", "sval", "flag", "k", "n1", "n2", "id");
}
/** Builds a random string of {@code size} uppercase ASCII letters. */
private String randString(Random rand, int size) {
    char[] letters = new char[size];
    for (int i = 0; i < size; i++) {
        letters[i] = (char)('A' + rand.nextInt(26));
    }
    return new String(letters);
}
/**
 * Baseline measurement: select/project/aggregate over an index scan using the
 * stock operator framework. Times {@code REPEATS} runs after {@code WARMUPS}
 * unmeasured warmup runs and prints the mean in milliseconds.
 */
@Test
public void normalOperators() {
    Schema schema = new Schema(rowDefCache().ais());
    IndexRowType indexType = schema.indexRowType(index);
    IndexKeyRange keyRange = IndexKeyRange.unbounded(indexType);
    API.Ordering ordering = new API.Ordering();
    ordering.append(new FieldExpression(indexType, 0), true); // order by column 0 (gid)
    Operator plan = API.indexScan_Default(indexType, keyRange, ordering);
    RowType rowType = indexType;
    plan = spa(plan, rowType); // stack select / project / aggregate on top
    PersistitAdapter adapter = persistitAdapter(schema);
    QueryContext queryContext = queryContext(adapter);
    System.out.println("NORMAL OPERATORS");
    double time = 0.0;
    for (int i = 0; i < WARMUPS+REPEATS; i++) {
        long start = System.nanoTime();
        Cursor cursor = API.cursor(plan, queryContext);
        cursor.open();
        while (true) {
            Row row = cursor.next();
            if (row == null) break;
            if (i == 0) System.out.println(row); // print result rows once, as a sanity check
        }
        cursor.close();
        long end = System.nanoTime();
        if (i >= WARMUPS) // only measured iterations contribute to the average
            time += (end - start) / 1.0e6;
    }
    System.out.println(String.format("%g ms", time / REPEATS));
}
/**
 * Wraps {@code plan} with the query under test:
 * WHERE 'M' &lt;= sval AND sval &lt;= 'Y' AND flag &lt;&gt; 1,
 * projected to (gid, k, n1, n2), then grouped on gid with
 * count(k), sum(n1), sum(n2).
 */
private Operator spa(Operator plan, RowType rowType) {
    FunctionsRegistry functions = new FunctionsRegistryImpl();
    ExpressionComposer and = functions.composer("and");
    Expression pred1 = functions.composer("greaterOrEquals")
        .compose(Arrays.asList(Expressions.field(rowType, 1),
                               Expressions.literal("M")));
    Expression pred2 = functions.composer("lessOrEquals")
        .compose(Arrays.asList(Expressions.field(rowType, 1),
                               Expressions.literal("Y")));
    Expression pred = and.compose(Arrays.asList(pred1, pred2));
    pred2 = functions.composer("notEquals")
        .compose(Arrays.asList(Expressions.field(rowType, 2),
                               Expressions.literal(1L)));
    pred = and.compose(Arrays.asList(pred, pred2));
    plan = API.select_HKeyOrdered(plan, rowType, pred);
    plan = API.project_Default(plan, rowType,
                               Arrays.asList(Expressions.field(rowType, 0),
                                             Expressions.field(rowType, 3),
                                             Expressions.field(rowType, 4),
                                             Expressions.field(rowType, 5)));
    rowType = plan.rowType();
    plan = API.aggregate_Partial(plan, rowType,
                                 1, functions, // group on the first projected column (gid)
                                 Arrays.asList("count", "sum", "sum"));
    return plan;
}
/**
 * Same query as {@link #normalOperators()}, but with the select/project/
 * aggregate fused into the single hand-written {@link BespokeOperator}.
 */
@Test
public void bespokeOperator() {
    Schema schema = new Schema(rowDefCache().ais());
    IndexRowType indexType = schema.indexRowType(index);
    IndexKeyRange keyRange = IndexKeyRange.unbounded(indexType);
    API.Ordering ordering = new API.Ordering();
    ordering.append(new FieldExpression(indexType, 0), true); // order by gid
    Operator plan = API.indexScan_Default(indexType, keyRange, ordering);
    RowType rowType = indexType;
    plan = new BespokeOperator(plan);
    PersistitAdapter adapter = persistitAdapter(schema);
    QueryContext queryContext = queryContext(adapter);
    System.out.println("BESPOKE OPERATOR");
    double time = 0.0;
    for (int i = 0; i < WARMUPS+REPEATS; i++) {
        long start = System.nanoTime();
        Cursor cursor = API.cursor(plan, queryContext);
        cursor.open();
        while (true) {
            Row row = cursor.next();
            if (row == null) break;
            if (i == 0) System.out.println(row); // print result rows once, as a sanity check
        }
        cursor.close();
        long end = System.nanoTime();
        if (i >= WARMUPS)
            time += (end - start) / 1.0e6;
    }
    System.out.println(String.format("%g ms", time / REPEATS));
}
/**
 * Lower bound: traverses the raw Persistit index keys directly and aggregates
 * in plain Java, bypassing the operator framework entirely.
 */
@Test
public void pojoAggregator() throws PersistitException {
    System.out.println("POJO");
    double time = 0.0;
    for (int i = 0; i < WARMUPS+REPEATS; i++) {
        long start = System.nanoTime();
        POJOAggregator aggregator = new POJOAggregator(i == 0); // print only on the first run
        Exchange exchange = persistitStore().getExchange(session(), index);
        exchange.clear();
        exchange.append(Key.BEFORE);
        while (exchange.traverse(Key.GT, true)) {
            Key key = exchange.getKey();
            aggregator.aggregate(key);
        }
        aggregator.emit(); // flush the final group
        persistitStore().releaseExchange(session(), exchange);
        long end = System.nanoTime();
        if (i >= WARMUPS)
            time += (end - start) / 1.0e6;
    }
    System.out.println(String.format("%g ms", time / REPEATS));
}
/** Operator wrapper around {@link BespokeCursor}: a hand-fused select+project+aggregate. */
static class BespokeOperator extends Operator {
    private Operator inputOperator;
    private RowType outputType; // the (key, count, sum, sum) row type produced

    public BespokeOperator(Operator inputOperator) {
        this.inputOperator = inputOperator;
        outputType = new BespokeRowType();
    }

    @Override
    protected Cursor cursor(QueryContext context) {
        return new BespokeCursor(context, API.cursor(inputOperator, context), outputType);
    }

    @Override
    public void findDerivedTypes(Set<RowType> derivedTypes) {
        inputOperator.findDerivedTypes(derivedTypes);
        derivedTypes.add(outputType);
    }

    @Override
    public List<Operator> getInputOperators() {
        return Collections.singletonList(inputOperator);
    }

    @Override
    public RowType rowType() {
        return outputType;
    }
}
/**
 * Cursor that fuses the whole select+project+aggregate pipeline into one pass
 * over the input cursor, avoiding per-operator row overhead.
 * Lifecycle encoding: aggregator != null means active; inputCursor == null means destroyed.
 */
static class BespokeCursor extends OperatorExecutionBase implements Cursor {
    private Cursor inputCursor;
    private RowType outputType;
    private ValuesHolderRow outputRow; // one row, reused for every emitted group
    private BespokeAggregator aggregator;

    public BespokeCursor(QueryContext context, Cursor inputCursor, RowType outputType) {
        super(context);
        this.inputCursor = inputCursor;
        this.outputType = outputType;
    }

    @Override
    public void open() {
        inputCursor.open();
        outputRow = new ValuesHolderRow(outputType);
        aggregator = new BespokeAggregator();
    }

    @Override
    public void close() {
        inputCursor.close();
        aggregator = null; // marks the cursor idle
    }

    @Override
    public void destroy() {
        close();
        inputCursor = null; // marks the cursor destroyed
    }

    @Override
    public boolean isIdle() {
        return ((inputCursor != null) && (aggregator == null));
    }

    @Override
    public boolean isActive() {
        return ((inputCursor != null) && (aggregator != null));
    }

    @Override
    public boolean isDestroyed() {
        return (inputCursor == null);
    }

    @Override
    public Row next() {
        if (aggregator == null)
            return null;
        while (true) {
            Row inputRow = inputCursor.next();
            if (inputRow == null) {
                // Input exhausted: flush the final in-progress group, if any.
                if (aggregator.isEmpty()) {
                    close();
                    return null;
                }
                aggregator.fill(outputRow);
                close();
                return outputRow;
            }
            // aggregate() returns true exactly when a finished group has been
            // written into outputRow (i.e. the grouping key changed).
            if (aggregator.aggregate(inputRow, outputRow)) {
                return outputRow;
            }
        }
    }
}
static final AkType[] TYPES = {
AkType.LONG, AkType.LONG, AkType.LONG, AkType.LONG
};
/** Minimal row type for the bespoke aggregator's (key, count, sum, sum) output. */
static class BespokeRowType extends RowType {
    public BespokeRowType() {
        super(-1); // NOTE(review): -1 presumably means "no real type id" — confirm against RowType
    }

    @Override
    public DerivedTypesSchema schema() {
        return null;
    }

    // NOTE(review): nFields/typeAt look like RowType overrides but lack @Override — confirm.
    public int nFields() {
        return TYPES.length;
    }

    public AkType typeAt(int index) {
        return TYPES[index];
    }
}
/**
 * Hand-written select + group-by aggregator producing
 * (key, count, sum, sum). A finished group is emitted when the key changes,
 * which relies on the input arriving ordered by the grouping key.
 */
static class BespokeAggregator {
    private boolean key_init; // false until the first qualifying row is seen
    private long key;
    private long count1;
    private boolean sum1_init;
    private long sum1;
    private boolean sum2_init;
    private long sum2;

    /** True while no qualifying row has started a group yet. */
    public boolean isEmpty() {
        return !key_init;
    }

    /**
     * Folds one input row into the running group.
     * Returns true when a completed group has been written to outputRow.
     */
    public boolean aggregate(Row inputRow, ValuesHolderRow outputRow) {
        // The select part.
        String sval = inputRow.eval(1).getString();
        if (("M".compareTo(sval) > 0) ||
            ("Y".compareTo(sval) < 0))
            return false;
        long flag = inputRow.eval(2).getInt();
        if (flag == 1)
            return false;
        // The actual aggregate part.
        boolean emit = false, reset = false;
        long nextKey = inputRow.eval(0).getInt();
        if (!key_init) {
            key_init = reset = true;
            key = nextKey;
        }
        else if (key != nextKey) {
            // Key changed: snapshot the finished group before starting the next one.
            fill(outputRow);
            emit = reset = true;
            key = nextKey;
        }
        if (reset) {
            sum1_init = sum2_init = false;
            count1 = sum1 = sum2 = 0;
        }
        ValueSource value = inputRow.eval(3);
        if (!value.isNull()) {
            count1++;
        }
        value = inputRow.eval(4);
        if (!value.isNull()) {
            if (!sum1_init)
                sum1_init = true;
            sum1 += value.getInt();
        }
        value = inputRow.eval(5);
        if (!value.isNull()) {
            if (!sum2_init)
                sum2_init = true;
            sum2 += value.getInt();
        }
        return emit;
    }

    /** Copies the current group's key and aggregates into row. */
    public void fill(ValuesHolderRow row) {
        row.holderAt(0).putLong(key);
        row.holderAt(1).putLong(count1);
        row.holderAt(2).putLong(sum1);
        row.holderAt(3).putLong(sum2);
    }

    @Override
    public String toString() {
        return String.format("%d: [%d %d %d]", key, count1, sum1, sum2);
    }
}
/**
 * Plain-Java equivalent of {@link BespokeAggregator} operating directly on
 * Persistit {@link Key}s; finished groups are printed (when doPrint) rather
 * than returned as rows.
 */
static class POJOAggregator {
    private boolean key_init; // false until the first qualifying key is seen
    private long key;
    private long count1;
    private boolean sum1_init;
    private long sum1;
    private boolean sum2_init;
    private long sum2;
    private final boolean doPrint; // print each finished group (first test iteration only)

    public POJOAggregator(boolean doPrint) {
        this.doPrint = doPrint;
    }

    /** Folds one index key into the running group; segment positions match index t_i. */
    public void aggregate(Key row) {
        // The select part: 'M' <= sval <= 'Y' and flag != 1.
        row.indexTo(1);
        String sval = row.decodeString();
        if (("M".compareTo(sval) > 0) ||
            ("Y".compareTo(sval) < 0))
            return;
        row.indexTo(2);
        long flag = row.decodeLong();
        if (flag == 1)
            return;
        row.indexTo(0);
        boolean reset = false;
        long nextKey = row.decodeLong();
        if (!key_init) {
            key_init = reset = true;
            key = nextKey;
        }
        else if (key != nextKey) {
            emit(); // key changed: report the finished group
            reset = true;
            key = nextKey;
        }
        if (reset) {
            sum1_init = sum2_init = false;
            count1 = sum1 = sum2 = 0;
        }
        row.indexTo(3);
        if (!row.isNull()) {
            count1++;
        }
        row.indexTo(4);
        if (!row.isNull()) {
            if (!sum1_init)
                sum1_init = true;
            sum1 += row.decodeLong();
        }
        row.indexTo(5);
        if (!row.isNull()) {
            if (!sum2_init)
                sum2_init = true;
            sum2 += row.decodeLong();
        }
    }

    /** Prints the current group when printing is enabled; caller resets state. */
    public void emit() {
        if (doPrint)
            System.out.println(this);
    }

    @Override
    public String toString() {
        return String.format("%d: [%d %d %d]", key, count1, sum1, sum2);
    }
}
/**
 * Same query as {@link #normalOperators()} with a top-100 insertion sort on
 * the aggregate output appended, to measure the added sort cost.
 */
@Test
public void sorted() {
    Schema schema = new Schema(rowDefCache().ais());
    IndexRowType indexType = schema.indexRowType(index);
    IndexKeyRange keyRange = IndexKeyRange.unbounded(indexType);
    API.Ordering ordering = new API.Ordering();
    ordering.append(new FieldExpression(indexType, 0), true); // order by gid
    Operator plan = API.indexScan_Default(indexType, keyRange, ordering);
    RowType rowType = indexType;
    plan = spa(plan, rowType);
    rowType = plan.rowType();
    ordering = new API.Ordering();
    ordering.append(new FieldExpression(rowType, 2), true); // sort aggregates by column 2
    plan = API.sort_InsertionLimited(plan, rowType, ordering,
                                     API.SortOption.PRESERVE_DUPLICATES, 100);
    PersistitAdapter adapter = persistitAdapter(schema);
    QueryContext queryContext = queryContext(adapter);
    System.out.println("SORTED");
    double time = 0.0;
    for (int i = 0; i < WARMUPS+REPEATS; i++) {
        long start = System.nanoTime();
        Cursor cursor = API.cursor(plan, queryContext);
        cursor.open();
        while (true) {
            Row row = cursor.next();
            if (row == null) break;
            if (i == 0) System.out.println(row); // print result rows once, as a sanity check
        }
        cursor.close();
        long end = System.nanoTime();
        if (i >= WARMUPS)
            time += (end - start) / 1.0e6;
    }
    System.out.println(String.format("%g ms", time / REPEATS));
}
/**
 * Parallel variant: partitions the gid key space into test.nthreads ranges
 * (default 4), runs the select/project/aggregate plan once per range on worker
 * threads via {@link Map_Parallel}, then merges and top-100 sorts the output.
 */
@Test
public void parallel() {
    Schema schema = new Schema(rowDefCache().ais());
    IndexRowType indexType = schema.indexRowType(index);
    ValuesRowType valuesType = schema.newValuesType(AkType.LONG, AkType.LONG);
    // Bind the scan's [lo, hi) gid range to fields 0 and 1 of the per-thread values row.
    IndexBound lo = new IndexBound(new RowBasedUnboundExpressions(indexType, Collections.<Expression>singletonList(new BoundFieldExpression(0, new FieldExpression(valuesType, 0)))), new SetColumnSelector(0));
    IndexBound hi = new IndexBound(new RowBasedUnboundExpressions(indexType, Collections.<Expression>singletonList(new BoundFieldExpression(0, new FieldExpression(valuesType, 1)))), new SetColumnSelector(0));
    IndexKeyRange keyRange = IndexKeyRange.bounded(indexType, lo, true, hi, false);
    API.Ordering ordering = new API.Ordering();
    ordering.append(new FieldExpression(indexType, 0), true);
    Operator plan = API.indexScan_Default(indexType, keyRange, ordering);
    RowType rowType = indexType;
    plan = spa(plan, rowType);
    rowType = plan.rowType();
    int nthreads = Integer.parseInt(System.getProperty("test.nthreads", "4"));
    double n = 10.0 / nthreads; // 10 distinct gid values split across threads
    List<ValuesRow> keyRows = new ArrayList<ValuesRow>();
    for (int i = 0; i < nthreads; i++) {
        Object[] values = new Object[2];
        values[0] = Math.round(n * i);
        values[1] = Math.round(n * (i+1));
        keyRows.add(new ValuesRow(valuesType, values));
    }
    plan = new Map_Parallel(plan, rowType, valuesType, keyRows, 0);
    ordering = new API.Ordering();
    ordering.append(new FieldExpression(rowType, 2), true);
    plan = API.sort_InsertionLimited(plan, rowType, ordering,
                                     API.SortOption.PRESERVE_DUPLICATES, 100);
    PersistitAdapter adapter = persistitAdapter(schema);
    QueryContext queryContext = queryContext(adapter);
    System.out.println("PARALLEL");
    double time = 0.0;
    for (int i = 0; i < WARMUPS+REPEATS; i++) {
        long start = System.nanoTime();
        Cursor cursor = API.cursor(plan, queryContext);
        cursor.open();
        while (true) {
            Row row = cursor.next();
            if (row == null) break;
            if (i == 0) System.out.println(row); // print result rows once, as a sanity check
        }
        cursor.close();
        long end = System.nanoTime();
        if (i >= WARMUPS)
            time += (end - start) / 1.0e6;
    }
    System.out.println(String.format("%g ms", time / REPEATS));
}
/**
 * Operator that fans the input plan out across worker threads, one per values
 * row; each worker binds its (lo, hi) range row at bindingPosition.
 * NOTE(review): non-static inner class — its cursor needs the enclosing test
 * to build per-thread sessions and adapters.
 */
class Map_Parallel extends Operator {
    private Operator inputOperator;
    private RowType outputType; // NOTE(review): stored but rowType() is not overridden here — confirm base default suffices
    private ValuesRowType valuesType;
    private List<ValuesRow> valuesRows; // one (lo, hi) bound row per worker thread
    private int bindingPosition;

    public Map_Parallel(Operator inputOperator, RowType outputType, ValuesRowType valuesType, List<ValuesRow> valuesRows, int bindingPosition) {
        this.inputOperator = inputOperator;
        this.outputType = outputType;
        this.valuesType = valuesType;
        this.valuesRows = valuesRows;
        this.bindingPosition = bindingPosition;
    }

    @Override
    protected Cursor cursor(QueryContext context) {
        return new ParallelCursor(context, inputOperator, valuesType, valuesRows, bindingPosition);
    }

    @Override
    public List<Operator> getInputOperators() {
        return Collections.singletonList(inputOperator);
    }
}
class ParallelCursor extends OperatorExecutionBase implements Cursor {
private QueryContext context;
private ShareHolderMux<Row> queue;
private List<WorkerThread> threads;
private int nrunning;
private int takeIndex;
public ParallelCursor(QueryContext context, Operator inputOperator, ValuesRowType valuesType, List<ValuesRow> valuesRows, int bindingPosition) {
this.context = context;
int nthreads = valuesRows.size();
queue = new ShareHolderMux<Row>(nthreads * nthreads);
threads = new ArrayList<WorkerThread>(nthreads);
for (ValuesRow valuesRow : valuesRows) {
threads.add(new WorkerThread(context, inputOperator, valuesType, valuesRow, bindingPosition, queue));
}
}
@Override
public void open() {
nrunning = 0;
takeIndex = -1;
for (WorkerThread thread : threads) {
thread.open();
nrunning++;
}
}
@Override
public void close() {
for (WorkerThread thread : threads) {
if (thread.close())
nrunning
}
// TODO: Could be off if closed prematurely and there are
// nulls in the queue (or waiting to be added).
assert (nrunning == 0) : nrunning;
}
@Override
public void destroy() {
for (WorkerThread thread : threads) {
thread.destroy();
}
threads = null;
queue.clear();
queue = null;
}
@Override
public boolean isIdle() {
return (nrunning == 0);
}
@Override
public boolean isActive() {
return (nrunning > 0);
}
@Override
public boolean isDestroyed() {
return (threads == null);
}
@Override
public Row next() {
while (nrunning > 0) {
if (takeIndex >= 0) {
queue.releaseHoldIndex(takeIndex);
takeIndex = -1;
}
try {
takeIndex = queue.nextGetIndex();
}
catch (InterruptedException ex) {
throw new QueryCanceledException(context.getSession());
}
Row row = queue.get(takeIndex);
if (row == null)
nrunning
else
return row;
}
return null;
}
}
/**
 * Runs one copy of the plan on its own thread and session, pushing rows into
 * the shared queue; a null row is enqueued as the end-of-stream marker.
 */
class WorkerThread implements Runnable {
    private Session session;
    private PersistitAdapter adapter;
    private QueryContext context;
    private Cursor inputCursor;
    private ShareHolderMux<Row> queue;
    private Thread thread;
    private volatile boolean open; // true while the worker loop should keep producing

    public WorkerThread(QueryContext context, Operator inputOperator, ValuesRowType valuesType, ValuesRow valuesRow, int bindingPosition, ShareHolderMux<Row> queue) {
        session = createNewSession();
        adapter = new PersistitAdapter((Schema)valuesType.schema(), persistitStore(), treeService(), session, configService());
        // BUG FIX: the original assigned the constructor *parameter* named
        // context, leaving the field null and making run()'s cancellation
        // path (context.getSession()) throw NPE. Assign the field instead.
        this.context = queryContext(adapter);
        this.context.setRow(bindingPosition, valuesRow);
        inputCursor = API.cursor(inputOperator, this.context);
        this.queue = queue;
    }

    /** Starts the worker thread. */
    public void open() {
        thread = new Thread(this);
        thread.start();
    }

    /** Interrupts a still-running worker; returns false if it already finished. */
    public boolean close() {
        if (!open) return false;
        thread.interrupt();
        //thread.join();
        return true;
    }

    public void destroy() {
        close();
        if (inputCursor != null) {
            inputCursor.destroy();
            inputCursor = null;
        }
        session.close();
    }

    @Override
    public void run() {
        inputCursor.open();
        open = true;
        try {
            while (open) {
                Row row = inputCursor.next();
                if (row == null)
                    open = false;
                // The final null is enqueued too: it is the consumer's
                // end-of-stream marker for this worker.
                queue.put(row);
            }
        }
        catch (InterruptedException ex) {
            throw new QueryCanceledException(context.getSession());
        }
        finally {
            inputCursor.close();
        }
    }
}
/** A multiplexer over a fixed number of {@link Shareable}s.
 * There is no guarantee of output order, even for items inserted by the same thread.
 * The assumption is that the caller will sort them or does not care.
 */
static class ShareHolderMux<T extends Shareable> {
    private int size;
    private AtomicReferenceArray<T> buffers;
    // Bit i of holdMask: slot i is allocated (being filled or awaiting a consumer).
    private AtomicLong holdMask = new AtomicLong();
    // Bit i of getMask: slot i is filled and ready for a consumer.
    private AtomicLong getMask = new AtomicLong();
    private Semaphore holdSemaphore = new Semaphore(0); // signaled when a full buffer gains space
    private Semaphore getSemaphore = new Semaphore(0);  // signaled when an empty buffer gains an item

    public ShareHolderMux(int size) {
        assert (size < 64); // one bit of a long per slot
        this.size = size;
        buffers = new AtomicReferenceArray<T>(size);
    }

    /** Get the item at the given position. */
    public T get(int i) {
        return buffers.get(i);
    }

    /** Store an item into the given position, transferring the share
     * from any previous occupant. */
    public void hold(int i, T item) {
        if (item != null) {
            item.acquire();
        }
        item = buffers.getAndSet(i, item);
        if (item != null) {
            item.release();
        }
    }

    /** Get an available index into which to {@link #hold} an
     * item, blocking when full. */
    public int nextHoldIndex() throws InterruptedException {
        while (true) {
            long mask = holdMask.get();
            long bit = Long.lowestOneBit(~mask);
            // BUG FIX: compare the free slot's *index* against size. The old
            // test (bit > size) compared the bit's value, which rejected every
            // slot past index log2(size) and could block forever.
            if ((bit == 0) || (Long.numberOfTrailingZeros(bit) >= size)) {
                holdSemaphore.acquire();
                continue;
            }
            if (holdMask.compareAndSet(mask, mask | bit))
                return Long.numberOfTrailingZeros(bit);
        }
    }

    /** Get an index which has been filled, blocking when empty. */
    public int nextGetIndex() throws InterruptedException {
        while (true) {
            long mask = getMask.get();
            long bit = Long.lowestOneBit(mask);
            if (bit == 0) {
                getSemaphore.acquire();
                continue;
            }
            if (getMask.compareAndSet(mask, mask & ~bit)) {
                return Long.numberOfTrailingZeros(bit);
            }
        }
    }

    /** Mark an index returned by {@link #nextHoldIndex} as having been filled.
     * @see #put
     */
    public void heldGetIndex(int i) {
        while (true) {
            long mask = getMask.get();
            // BUG FIX: use a long shift (1L << i); the int shift 1 << i is
            // wrong for i >= 31 even though the class allows up to 63 slots.
            if (getMask.compareAndSet(mask, mask | (1L << i))) {
                if (mask == 0) {
                    // Was previously empty.
                    getSemaphore.release();
                }
                break;
            }
        }
    }

    /** Clear the given index position, making it available to {@link put} again. */
    public void releaseHoldIndex(int i) {
        hold(i, null);
        while (true) {
            long mask = holdMask.get();
            // BUG FIX: long shifts here too (1L << i, 1L << size).
            if (holdMask.compareAndSet(mask, mask & ~(1L << i))) {
                if (mask == (1L << size) - 1) {
                    // Was previously full.
                    holdSemaphore.release();
                }
                break;
            }
        }
    }

    /** Add an item to the buffer, waiting for an available slot. */
    public void put(T item) throws InterruptedException {
        int i = nextHoldIndex();
        hold(i, item);
        heldGetIndex(i);
    }

    /** Drop every held item and reset both semaphores. */
    public void clear() {
        for (int i = 0; i < size; i++) {
            hold(i, null);
        }
        holdSemaphore.drainPermits();
        getSemaphore.drainPermits();
    }
}
}
|
package com.deepoove.poi.tl;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileOutputStream;
import java.util.HashMap;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import com.deepoove.poi.XWPFTemplate;
import com.deepoove.poi.data.PictureRenderData;
import com.deepoove.poi.util.BytePictureUtils;
/**
*
* @author Sayi
* @version 1.0.0
*/
public class PictureRenderTest {

    BufferedImage bufferImage; // 100x100 solid red image used as the in-memory picture source

    /** Paints the in-memory test image before each test. */
    @Before
    public void init(){
        bufferImage = BytePictureUtils.newBufferImage(100, 100);
        Graphics2D g = (Graphics2D)bufferImage.getGraphics();
        g.setColor(Color.red);
        g.fillRect(0, 0, 100, 100);
        g.dispose();
        bufferImage.flush();
    }

    /**
     * Renders a template containing picture tags from four sources:
     * a local file path, local file bytes, a URL, and a BufferedImage.
     */
    @SuppressWarnings("serial")
    @Test
    public void testPictureRender() throws Exception {
        Map<String, Object> datas = new HashMap<String, Object>() {
            {
                put("localPicture", new PictureRenderData(100, 120, "src/test/resources/logo.png"));
                //byte
                put("localBytePicture", new PictureRenderData(100, 120, ".png", BytePictureUtils.getLocalByteArray(new File("src/test/resources/logo.png"))));
                put("urlPicture", new PictureRenderData(100, 100, ".png", BytePictureUtils.getUrlByteArray("https://avatars3.githubusercontent.com/u/1394854?v=3&s=40")));
                // java
                put("bufferImagePicture", new PictureRenderData(100, 120, ".png", BytePictureUtils.getBufferByteArray(bufferImage)));
            }
        };
        XWPFTemplate template = XWPFTemplate.compile("src/test/resources/picture.docx")
                .render(datas);
        // BUG FIX: close the stream and template even when write() throws,
        // so a failing render does not leak file handles.
        try {
            FileOutputStream out = new FileOutputStream("out_picture.docx");
            try {
                template.write(out);
                out.flush();
            } finally {
                out.close();
            }
        } finally {
            template.close();
        }
    }
}
|
package com.github.dakusui.crest.ut;
import com.github.dakusui.crest.Crest;
import com.github.dakusui.crest.core.Assertion;
import com.github.dakusui.crest.core.InternalUtils;
import com.github.dakusui.crest.core.Matcher;
import com.github.dakusui.crest.utils.TestBase;
import com.github.dakusui.crest.utils.printable.Predicates;
import org.hamcrest.CoreMatchers;
import org.hamcrest.Matchers;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import java.util.LinkedList;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Predicate;
import static com.github.dakusui.crest.Crest.allOf;
import static com.github.dakusui.crest.Crest.anyOf;
import static com.github.dakusui.crest.utils.printable.Functions.elementAt;
import static com.github.dakusui.crest.utils.printable.Functions.size;
import static com.github.dakusui.crest.utils.printable.Predicates.equalTo;
import static org.junit.Assert.*;
@RunWith(Enclosed.class)
public class CrestTest {
/** Immutable holder for a matcher's rendered failure text. */
static class Description {
    private final String content;

    Description(String text) {
        content = text;
    }

    /** Returns the failure text exactly as captured. */
    @Override
    public String toString() {
        return content;
    }
}
private static final Predicate<Integer> FAILING_CHECK = InternalUtils.predicate("failingCheck", v -> {
throw new RuntimeException("FAILED");
});
private static final Function<List<String>, Integer> FAILING_TRANSFORM = InternalUtils.function("failingTransform", v -> {
throw new RuntimeException("FAILED");
});
/**
* <pre>
* Conj
* (1): P -> P : pass
* (2): P -> F : fail
* (3): E -> P : fail
* (4): F -> F : fail
* </pre>
* <pre>
* TestData: ["Hello", "world", "!"]
* </pre>
*/
/**
 * Conjunction ({@code allOf}) cases: the matcher passes only when every
 * clause passes, and the failure description must name each failing clause.
 */
public static class ConjTest extends TestBase {
    /**
     * <pre>
     * Conj
     * (1): P -> P : pass
     * </pre>
     */
    @Test
    public void whenPassingAndThenPassing$thenPasses() {
        List<String> aList = composeTestData();
        Optional<Description> description = CrestTest.describeFailure(
            aList,
            allOf(
                Crest.asObject(
                    elementAt(0)
                ).check(
                    equalTo("Hello")).all()
                ,
                Crest.asObject(
                    size()
                ).check(
                    equalTo(3)
                ).all()
            ));
        System.out.println(description.orElse(null));
        // An absent description means the whole conjunction passed.
        assertFalse(description.isPresent());
    }

    // The transform and predicate below each throw if invoked a second time,
    // proving the matcher evaluates every function/check exactly once.
    @Test
    public void makeSureCalledOnlyOnce() {
        List<String> aList = composeTestData();
        Optional<Description> description = CrestTest.describeFailure(
            aList,
            allOf(
                Crest.asObject(
                    new Function<List<?>, String>() {
                        boolean firstTime = true;
                        @Override
                        public String apply(List<?> objects) {
                            try {
                                if (firstTime)
                                    return (String) elementAt(0).apply(objects);
                                else
                                    throw new Error();
                            } finally {
                                firstTime = false;
                            }
                        }
                    }
                ).check(
                    new Predicate<String>() {
                        boolean firstTime = true;
                        @Override
                        public boolean test(String s) {
                            try {
                                if (firstTime)
                                    return equalTo("Hello").test(s);
                                else
                                    throw new Error();
                            } finally {
                                firstTime = false;
                            }
                        }
                    }
                ).all()
                ,
                Crest.asObject(
                    size()
                ).check(
                    equalTo(3)
                ).all()
            ));
        System.out.println(description.orElse(null));
        assertFalse(description.isPresent());
    }

    /**
     * <pre>
     * Conj
     * (2): P -> F : fail
     * </pre>
     */
    @Test
    public void whenPassingAndThenFailing$thenFailsAndMessageAppropriate() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            allOf(
                Crest.asObject(size())
                    .check(equalTo(3))
                    .all(),
                Crest.asObject(elementAt(0))
                    .check(equalTo("hello"))
                    .all()
            ));
        System.out.println(description.orElse(null));
        // The description must pinpoint the failing clause and the actual value.
        assertEquals(
            "\n" +
            "Expected: and:[\n"
            + " size(x) equalTo[3]\n"
            + " elementAt[0](x) equalTo[hello]\n"
            + "]\n"
            + " but: when x=<[Hello, world, !]>; then and:[\n"
            + " size(x) equalTo[3]\n"
            + " elementAt[0](x) equalTo[hello] was not met because elementAt[0](x)=\"Hello\"\n"
            + "]->false",
            description.orElseThrow(AssertionError::new).toString()
        );
    }

    /**
     * <pre>
     * Conj
     * (3): E -> P : error
     * </pre>
     */
    @Test
    public void whenErrorOnCheckAndThenPassing$thenErrorThrownAndMessageAppropriate() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            allOf(
                Crest.asObject(size()).check(FAILING_CHECK).all(),
                Crest.asObject(elementAt(0)).check(equalTo("Hello")).all()
            ));
        System.out.println(description.orElse(null));
        // startsWith: the tail of the message carries the exception stack trace.
        assertThat(
            description.orElseThrow(AssertionError::new).toString(),
            CoreMatchers.startsWith(
                "\n" +
                "Expected: and:[\n"
                + " size(x) failingCheck\n"
                + " elementAt[0](x) equalTo[Hello]\n"
                + "]\n"
                + " but: when x=<[Hello, world, !]>; then and:[\n"
                + " size(x) failingCheck failed with java.lang.RuntimeException(FAILED)\n"
                + " elementAt[0](x) equalTo[Hello]\n"
                + "]->false\n"
                + "FAILED"
            ));
    }

    /**
     * <pre>
     * Conj
     * (3): E -> P : error
     * </pre>
     */
    @Test
    public void whenErrorOnTransformAndThenPassing$thenErrorThrownAndMessageAppropriate() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            allOf(
                Crest.asObject(FAILING_TRANSFORM).check(Predicates.alwaysTrue()).all(),
                Crest.asObject(elementAt(0)).check(equalTo("Hello")).all()
            ));
        System.out.println(description.orElse(null));
        assertThat(
            description.orElseThrow(AssertionError::new).toString(),
            CoreMatchers.startsWith(
                "\n" +
                "Expected: and:[\n"
                + " failingTransform(x) alwaysTrue\n"
                + " elementAt[0](x) equalTo[Hello]\n"
                + "]\n"
                + " but: when x=<[Hello, world, !]>; then and:[\n"
                + " failingTransform(x) alwaysTrue failed with java.lang.RuntimeException(FAILED)\n"
                + " elementAt[0](x) equalTo[Hello]\n"
                + "]->false\n"
                + "FAILED"
            ));
    }

    /**
     * <pre>
     * Conj
     * (4): F -> F : fail
     * </pre>
     */
    @Test
    public void whenFailingAndThenFailing$thenFailsAndMessageAppropriate() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            allOf(
                Crest.asObject(size()).check(equalTo(2)).all(),
                Crest.asObject(elementAt(0)).check(equalTo("hello")).all()
            ));
        System.out.println(description.orElse(null));
        // Both failing clauses must appear, each with its actual value.
        assertEquals(
            "\n" +
            "Expected: and:[\n"
            + " size(x) equalTo[2]\n"
            + " elementAt[0](x) equalTo[hello]\n"
            + "]\n"
            + " but: when x=<[Hello, world, !]>; then and:[\n"
            + " size(x) equalTo[2] was not met because size(x)=<3>\n"
            + " elementAt[0](x) equalTo[hello] was not met because elementAt[0](x)=\"Hello\"\n"
            + "]->false",
            description.orElseThrow(AssertionError::new).toString()
        );
    }
}
/**
 * <pre>
 * Disj
 * (1): P -> P : pass
 * (2): P -> F : pass
 * (3): E -> P : fail
 * (4): F -> F : fail
 * </pre>
 */
public static class DisjTest extends TestBase {
    /**
     * <pre>
     * Disj
     * (1): P -> P : pass
     * </pre>
     */
    @Test
    public void whenPassingAndThen$thenPasses() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            anyOf(
                Crest.asObject(size()).check(equalTo(3)).all(),
                Crest.asObject(elementAt(0)).check(equalTo("Hello")).all()
            ));
        System.out.println(description.orElse(null));
        assertFalse(description.isPresent());
    }
    /**
     * <pre>
     * Disj
     * (2): P -> F : pass
     * </pre>
     * One passing disjunct is sufficient: the failing second disjunct must not
     * make the whole assertion fail. (The method name says "thenFails" but the
     * class-level table and the assertion below both expect a pass; the Javadoc
     * previously said "fail" and has been corrected.)
     */
    @Test
    public void whenDisjPassingAndThenFailing$thenFailsAndMessageAppropriate() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            anyOf(
                Crest.asObject(size()).check(equalTo(3)).all(),
                Crest.asObject(elementAt(0)).check(equalTo("hello")).all()
            ));
        // The duplicated println of the same description was removed.
        System.out.println(description.orElse(null));
        assertFalse(description.isPresent());
    }
    /**
     * <pre>
     * Disj
     * (3): E -> P : error
     * </pre>
     * In case an error is thrown, the assertion should fail even if all the other matchers are passing.
     */
    @Test
    public void whenErrorAndThenPassing$thenErrorThrownAndMessageAppropriate() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            anyOf(
                Crest.asObject(size()).check(FAILING_CHECK).all(),
                Crest.asObject(elementAt(0)).check(equalTo("Hello")).all()
            ));
        System.out.println(description.orElse(null));
        assertThat(
            description.orElseThrow(AssertionError::new).toString(),
            CoreMatchers.startsWith("\n" +
                "Expected: or:[\n"
                + " size(x) failingCheck\n"
                + " elementAt[0](x) equalTo[Hello]\n"
                + "]\n"
                + " but: when x=<[Hello, world, !]>; then or:[\n"
                + " size(x) failingCheck failed with java.lang.RuntimeException(FAILED)\n"
                + " elementAt[0](x) equalTo[Hello]\n"
                + "]->false\n"
                + "FAILED"
            )
        );
    }
    /**
     * <pre>
     * Disj
     * (4): F -> F : fail
     * </pre>
     */
    @Test
    public void whenFailingAndThenFailing$thenFailsAndMessageAppropriate() {
        List<String> aList = composeTestData();
        // NOTE(review): sibling tests build matchers with .all(); this one uses
        // .matcher(). Presumably equivalent for a single check — confirm, then
        // unify on .all() for consistency.
        Optional<Description> description = describeFailure(
            aList,
            anyOf(
                Crest.asObject(size()).check(equalTo(2)).matcher(),
                Crest.asObject(elementAt(0)).check(equalTo("hello")).matcher()
            ));
        System.out.println(description.orElse(null));
        assertEquals(
            "\n" +
            "Expected: or:[\n"
            + " size(x) equalTo[2]\n"
            + " elementAt[0](x) equalTo[hello]\n"
            + "]\n"
            + " but: when x=<[Hello, world, !]>; then or:[\n"
            + " size(x) equalTo[2] was not met because size(x)=<3>\n"
            + " elementAt[0](x) equalTo[hello] was not met because elementAt[0](x)=\"Hello\"\n"
            + "]->false",
            description.orElseThrow(AssertionError::new).toString()
        );
    }
}
public static class NestedTest extends TestBase {
    /**
     * <pre>
     * Disj
     * ->Conj
     * </pre>
     * A conjunction nested inside a disjunction: the report must render the
     * inner and:[...] group with deeper indentation and mark both the inner
     * and the outer group with their ->false outcome.
     */
    @Test
    public void whenConjUnderDisj$thenFailsAndMessageAppropriate() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            anyOf(
                Crest.asObject(size()).check(equalTo(2)).all(),
                // two checks on the same transformer fold into one nested and:[...]
                Crest.asObject(elementAt(0)).check(equalTo("hello")).check(equalTo("HELLO")).all()
            ));
        System.out.println(description.orElse(null));
        assertEquals(
            "\n" +
            "Expected: or:[\n"
            + " size(x) equalTo[2]\n"
            + " and:[\n"
            + " elementAt[0](x) equalTo[hello]\n"
            + " elementAt[0](x) equalTo[HELLO]\n"
            + " ]\n"
            + "]\n"
            + " but: when x=<[Hello, world, !]>; then or:[\n"
            + " size(x) equalTo[2] was not met because size(x)=<3>\n"
            + " and:[\n"
            + " elementAt[0](x) equalTo[hello] was not met because elementAt[0](x)=\"Hello\"\n"
            + " elementAt[0](x) equalTo[HELLO] was not met because elementAt[0](x)=\"Hello\"\n"
            + " ]->false\n"
            + "]->false",
            description.orElseThrow(AssertionError::new).toString()
        );
    }
    /**
     * <pre>
     * Conj
     * ->Disj
     * </pre>
     * The mirror case: a disjunction nested inside a conjunction; the rendered
     * structure must swap the and:/or: labels accordingly.
     */
    @Test
    public void whenDisjUnderConj$thenFailsAndMessageAppropriate() {
        List<String> aList = composeTestData();
        Optional<Description> description = describeFailure(
            aList,
            allOf(
                Crest.asObject(size()).check(equalTo(2)).all(),
                Crest.asObject(elementAt(0)).check(equalTo("hello")).check(equalTo("HELLO")).any()
            ));
        System.out.println(description.orElse(null));
        assertEquals(
            "\n" +
            "Expected: and:[\n"
            + " size(x) equalTo[2]\n"
            + " or:[\n"
            + " elementAt[0](x) equalTo[hello]\n"
            + " elementAt[0](x) equalTo[HELLO]\n"
            + " ]\n"
            + "]\n"
            + " but: when x=<[Hello, world, !]>; then and:[\n"
            + " size(x) equalTo[2] was not met because size(x)=<3>\n"
            + " or:[\n"
            + " elementAt[0](x) equalTo[hello] was not met because elementAt[0](x)=\"Hello\"\n"
            + " elementAt[0](x) equalTo[HELLO] was not met because elementAt[0](x)=\"Hello\"\n"
            + " ]->false\n"
            + "]->false",
            description.orElseThrow(AssertionError::new).toString()
        );
    }
}
/**
 * Evaluates {@code matcher} against {@code actual} and, on mismatch, renders
 * the expectation/mismatch report the same way a real assertion failure would.
 *
 * @param actual  the value under test
 * @param matcher the matcher to evaluate
 * @return the rendered failure description, or empty if the matcher matched
 */
private static <T> Optional<Description> describeFailure(T actual, Matcher<? super T> matcher) {
    Assertion<T> assertion = Assertion.create(null, matcher);
    if (matcher.matches(actual, assertion)) {
        return Optional.empty();
    }
    StringBuilder report = new StringBuilder();
    report.append("\nExpected: ");
    report.append(String.join("\n", matcher.describeExpectation(assertion)));
    report.append("\n but: ");
    report.append(String.join("\n", matcher.describeMismatch(actual, assertion)));
    return Optional.of(new Description(report.toString()));
}
/**
 * @return a fresh, mutable test fixture containing ["Hello", "world", "!"]
 */
private static List<String> composeTestData() {
    // Plain construction instead of double-brace initialization: the latter
    // creates a needless anonymous LinkedList subclass that captures the
    // enclosing class. List equality is by content, so callers are unaffected.
    List<String> data = new LinkedList<>();
    data.add("Hello");
    data.add("world");
    data.add("!");
    return data;
}
public static class NegativesTest extends TestBase {
    /** A not() around a passing matcher must fail with a not:[...]->false report. */
    @Test
    public void given_NotMatcher_$whenFailingTestPerformed$thenMessageCorrect() {
        Optional<Description> description = describeFailure(
            "HELLO",
            Crest.not(
                Crest.asString().containsString("HELLO").$()
            )
        );
        System.out.println(description.orElseThrow(RuntimeException::new));
        // orElseThrow replaces the bare Optional.get() (which also carried a
        // pointless <String> type witness); it fails loudly if absent.
        assertThat(
            description.orElseThrow(AssertionError::new).content,
            Matchers.<String>containsString("not:[\n"
                + " toString(x) containsString[HELLO]\n"
                + "]->false")
        );
    }
    /** A not() around a failing matcher must pass, i.e. produce no description. */
    @Test
    public void given_NotMatcher_$whenPassingTestPerformed$thenPassed() {
        Optional<Description> description = describeFailure(
            "HELLO",
            Crest.not(
                Crest.asString().containsString("WORLD").$()
            )
        );
        description.ifPresent(desc -> fail("Should have been passed but failed with a following message:" + desc.content));
    }
    /** noneOf fails as soon as any inner matcher matches; the report names the violated leaf. */
    @Test
    public void given_NoneOfMatcher_$whenFailingTestPerformed$thenMessageCorrect() {
        Optional<Description> description = describeFailure(
            "HELLO",
            Crest.noneOf(
                Crest.asString().eq("WORLD").$(),
                Crest.asString().containsString("HELLO").$()
            )
        );
        System.out.println(description.orElseThrow(RuntimeException::new));
        assertThat(
            description.orElseThrow(AssertionError::new).content,
            Matchers.<String>containsString("toString(x) =[WORLD] was not met because toString(x)=\"HELLO\"")
        );
    }
    /** noneOf passes when every inner matcher fails. */
    @Test
    public void given_NoneOfMatcher_$whenPassingTestPerformed$thenPasses() {
        Optional<Description> description = describeFailure(
            "HELLO",
            Crest.noneOf(
                Crest.asString().eq("WORLD").$(),
                Crest.asString().containsString("hellox").$()
            )
        );
        description.ifPresent(desc -> fail("Should have been passed but failed with a following message:" + desc.content));
    }
}
}
|
package com.itelg.xpath.helper;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import org.fest.assertions.Assertions;
import org.joda.time.DateTime;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Tests for DateHelper's parsing entry points across three date APIs:
 * java.util.Date, org.joda.time.DateTime and java.time.*.
 */
public class DateHelperTest
{
    @BeforeClass
    public static void init()
    {
        // NOTE(review): the JVM may have cached the default TimeZone before
        // this property is set, in which case it has no effect — confirm the
        // tests are actually pinned to Europe/Berlin on all runners.
        System.setProperty("user.timezone", "Europe/Berlin");
    }
    /**
     * START {@link java.util.Date}
     */
    @Test
    public void testToDate()
    {
        // Each assertion is executed twice — presumably to exercise a cached
        // formatter path inside DateHelper on the second call; confirm before
        // de-duplicating.
        Assert.assertEquals(new DateTime(2015, 1, 1, 0, 0).toDate(), DateHelper.toDate("2015-01-01", "yyyy-MM-dd"));
        Assert.assertEquals(new DateTime(2015, 1, 1, 0, 0).toDate(), DateHelper.toDate("2015-01-01", "yyyy-MM-dd"));
        Assert.assertEquals(new DateTime(2015, 1, 1, 1, 1, 1).toDate(), DateHelper.toDate("2015-01-01 01:01:01", "yyyy-MM-dd HH:mm:ss"));
        Assert.assertEquals(new DateTime(2015, 1, 1, 1, 1, 1).toDate(), DateHelper.toDate("2015-01-01 01:01:01", "yyyy-MM-dd HH:mm:ss"));
    }
    // An empty/invalid pattern must be rejected, not silently ignored.
    @Test(expected = IllegalArgumentException.class)
    public void testToDateWithWrongFormat()
    {
        DateHelper.toDate("2015-01-01 01:01:01", "");
    }
    // An empty date string must be rejected as well.
    @Test(expected = IllegalArgumentException.class)
    public void testToDateWithWrongDate()
    {
        DateHelper.toDate("", "yyyy-MM-dd HH:mm:ss");
    }
    /**
     * START {@link org.joda.time.DateTime}
     */
    @Test
    public void testToDateTime()
    {
        // Duplicated on purpose (see testToDate) — presumably the second call
        // hits a cached formatter.
        Assert.assertEquals(new DateTime(2015, 1, 1, 0, 0), DateHelper.toDateTime("2015-01-01", "yyyy-MM-dd"));
        Assert.assertEquals(new DateTime(2015, 1, 1, 0, 0), DateHelper.toDateTime("2015-01-01", "yyyy-MM-dd"));
        Assert.assertEquals(new DateTime(2015, 1, 1, 1, 1, 1), DateHelper.toDateTime("2015-01-01 01:01:01", "yyyy-MM-dd HH:mm:ss"));
        Assert.assertEquals(new DateTime(2015, 1, 1, 1, 1, 1), DateHelper.toDateTime("2015-01-01 01:01:01", "yyyy-MM-dd HH:mm:ss"));
    }
    @Test(expected = IllegalArgumentException.class)
    public void testToDateTimeWithWrongFormat()
    {
        DateHelper.toDateTime("2015-01-01 01:01:01", "");
    }
    @Test(expected = IllegalArgumentException.class)
    public void testToDateTimeWithWrongDate()
    {
        DateHelper.toDateTime("", "yyyy-MM-dd HH:mm:ss");
    }
    /**
     * START java.time.*
     */
    @Test
    public void testToZoneDateTime()
    {
        // isIn(...) accepts either interpretation of the offset; nanos are
        // zeroed so the comparison ignores the fractional-second part.
        ZonedDateTime zonedDateTime = DateHelper.toZonedDateTime("2015-07-08T12:21:30.667+02:00", DateTimeFormatter.ISO_OFFSET_DATE_TIME).withNano(0);
        Assertions.assertThat(zonedDateTime).isIn(ZonedDateTime.of(2015, 7, 8, 10, 21, 30, 0, ZoneId.of("+02:00")), ZonedDateTime.of(2015, 7, 8, 12, 21, 30, 0, ZoneId.of("+02:00")));
    }
    // A null formatter must be rejected.
    @Test(expected = IllegalArgumentException.class)
    public void testToZoneDateTimeWithWrongFormat()
    {
        DateHelper.toZonedDateTime("2015-07-08T12:21:30.667+02:00", null);
    }
    @Test(expected = IllegalArgumentException.class)
    public void testToZoneDateTimeWithWrongDate()
    {
        DateHelper.toZonedDateTime("", DateTimeFormatter.ISO_OFFSET_DATE_TIME);
    }
    @Test
    public void testToLocalDateTime()
    {
        Assert.assertEquals(LocalDateTime.of(2015, 7, 8, 12, 21, 30, 0), DateHelper.toLocalDateTime("2015-07-08T12:21:30.667+02:00", DateTimeFormatter.ISO_OFFSET_DATE_TIME).withNano(0));
    }
    @Test(expected = IllegalArgumentException.class)
    public void testToLocalDateTimeWithWrongFormat()
    {
        DateHelper.toLocalDateTime("2015-07-08T12:21:30.667+02:00", null);
    }
    @Test(expected = IllegalArgumentException.class)
    public void testToLocalDateTimeWithWrongDate()
    {
        DateHelper.toLocalDateTime("", DateTimeFormatter.ISO_OFFSET_DATE_TIME);
    }
}
|
package com.statemachinesystems.envy;
import com.statemachinesystems.envy.example.DummyConfigSource;
import org.junit.Before;
import org.junit.Test;
import java.io.*;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;
/**
 * Tests the Object-contract behaviour (equals/hashCode) and serializability of
 * the dynamic proxies produced by {@code Envy#proxy(Class)}.
 */
public class EnvyTest {
    // Minimal two-property configuration interface used by most tests.
    public interface Config {
        String foo();
        String bar();
    }
    // Structurally identical to Config; used to show that proxies of
    // different interfaces never compare equal.
    public interface AnotherConfig {
        String foo();
        String bar();
    }
    private ValueParserFactory valueParserFactory;
    private DummyConfigSource configSource;
    @Before
    public void setUp() {
        List<ValueParser<?>> valueParsers = new ArrayList<ValueParser<?>>(Envy.defaultValueParsers());
        valueParserFactory = new ValueParserFactory(valueParsers);
        configSource = new DummyConfigSource()
                .add("foo", "foo")
                .add("bar", "bar");
    }
    // Fresh Envy per call so each proxy is built independently.
    private Envy envy() {
        return new Envy(valueParserFactory, configSource);
    }
    @Test
    public void equalsMethodOnProxyReturnsTrueForSameInstance() {
        Config config = envy().proxy(Config.class);
        assertEquals(config, config);
    }
    // Two separately-built proxies over the same source must be equal, and
    // equality must be symmetric.
    @Test
    public void equalsMethodOnProxyReturnsTrueForEqualInstances() {
        Config config1 = envy().proxy(Config.class);
        Config config2 = envy().proxy(Config.class);
        assertEquals(config1, config2);
        assertEquals(config2, config1);
    }
    @Test
    public void equalsMethodOnProxyReturnsFalseForUnequalInstances() {
        // Sources differ only in the value of "foo".
        DummyConfigSource configSource1 = new DummyConfigSource()
                .add("foo", "foo1")
                .add("bar", "bar");
        DummyConfigSource configSource2 = new DummyConfigSource()
                .add("foo", "foo2")
                .add("bar", "bar");
        Config config1 = new Envy(valueParserFactory, configSource1).proxy(Config.class);
        Config config2 = new Envy(valueParserFactory, configSource2).proxy(Config.class);
        assertNotEquals(config1, config2);
        assertNotEquals(config2, config1);
    }
    @Test
    public void equalsMethodOnProxyReturnsFalseForInstanceOfAnotherProxyClass() {
        Config config1 = envy().proxy(Config.class);
        AnotherConfig config2 = envy().proxy(AnotherConfig.class);
        assertNotEquals(config1, config2);
        assertNotEquals(config2, config1);
    }
    @Test
    public void equalsMethodOnProxyReturnsFalseForUnrelatedProxyInstance() {
        ClassLoader classLoader = AnotherConfig.class.getClassLoader();
        // A hand-rolled proxy whose invoke() always answers `true`; an Envy
        // proxy must still not consider it equal.
        InvocationHandler invocationHandler = new InvocationHandler() {
            @Override
            public Object invoke(Object o, Method method, Object[] objects) throws Throwable {
                return true;
            }
        };
        Config config1 = envy().proxy(Config.class);
        AnotherConfig config2 = (AnotherConfig) Proxy.newProxyInstance(classLoader,
                new Class<?>[]{AnotherConfig.class}, invocationHandler);
        assertNotEquals(config1, config2);
    }
    @Test
    public void equalsMethodOnProxyReturnsFalseForInstanceOfAnotherNonProxyClass() {
        Config config = envy().proxy(Config.class);
        assertNotEquals(config, "some string");
    }
    @Test
    public void equalsMethodOnProxiesReturnsFalseForNullArgument() {
        Config config = envy().proxy(Config.class);
        assertFalse(config.equals(null));
    }
    // equals/hashCode contract: equal proxies must share a hash code.
    @Test
    public void hashCodeMethodReturnsTheSameResultForEqualProxyInstances() {
        Config config1 = envy().proxy(Config.class);
        Config config2 = envy().proxy(Config.class);
        assertThat(config1.hashCode(), is(config2.hashCode()));
    }
    // NOTE: distinct hash codes are not strictly guaranteed by the contract,
    // but are expected for these two specific sources.
    @Test
    public void hashCodeMethodReturnsDifferentResultsForUnequalProxyInstances() {
        DummyConfigSource configSource1 = new DummyConfigSource()
                .add("foo", "foo1")
                .add("bar", "bar");
        DummyConfigSource configSource2 = new DummyConfigSource()
                .add("foo", "foo2")
                .add("bar", "bar");
        Config config1 = new Envy(valueParserFactory, configSource1).proxy(Config.class);
        Config config2 = new Envy(valueParserFactory, configSource2).proxy(Config.class);
        assertNotEquals(config1.hashCode(), config2.hashCode());
    }
    // Round-trips a proxy through Java serialization and checks the result is
    // still a Config equal to the original.
    @Test
    public void proxyInstancesAreSerializable() throws IOException, ClassNotFoundException {
        Config config = envy().proxy(Config.class);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        new ObjectOutputStream(out).writeObject(config);
        Object deserialized = new ObjectInputStream(new ByteArrayInputStream(out.toByteArray())).readObject();
        assertThat(deserialized, instanceOf(Config.class));
        assertThat((Config) deserialized, is(config));
    }
}
|
package io.usethesource.vallang;
import java.io.IOException;
import java.io.StringReader;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Random;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.junit.jupiter.api.extension.ExtensionContext;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import io.usethesource.vallang.exceptions.FactTypeUseException;
import io.usethesource.vallang.io.StandardTextReader;
import io.usethesource.vallang.random.RandomValueGenerator;
import io.usethesource.vallang.type.Type;
import io.usethesource.vallang.type.TypeFactory;
import io.usethesource.vallang.type.TypeStore;
/**
* This value provider generates automatically/randomly values for test parameters of type:
* IValueFactory
* TypeFactory
* TypeStore
* IValue
* IList
* ISet
* IMap
* IInteger
* IReal
* INumber
* IRational
* INode
* IConstructor
* ITuple
* ISourceLocation
*
* If the class under test has a static field called "store" of type TypeStore, then this
* typestore will be passed to all parameters of type TypeStore instead of a fresh/empty TypeStore.
*/
public class ValueProvider implements ArgumentsProvider {
    private static final Random rnd = new Random();
    private static final boolean enableAnnotations = true;
    private static final TypeFactory tf = TypeFactory.getInstance();
    /**
     * We use this to accidentally generate arguments which are the same as the previous
     * once in a while:
     */
    private IValue previous = null;
    /**
     * Every vallang test is run using all implementations of IValueFactory.
     */
    private static final IValueFactory[] factories = {
        io.usethesource.vallang.impl.reference.ValueFactory.getInstance(),
        io.usethesource.vallang.impl.persistent.ValueFactory.getInstance()
    };
    /**
     * The random value generator is parametrized by the valuefactory at creation time.
     * We need to keep the reference due get better randomized results between (re-runs of)
     * individual tests.
     */
    private static final RandomValueGenerator[] generators = {
        new RandomValueGenerator(factories[0], rnd, 5, 10, enableAnnotations),
        new RandomValueGenerator(factories[1], rnd, 5, 10, enableAnnotations)
    };
    /**
     * This trivial class helps with streaming generated test inputs, and some other stuff.
     */
    private static class Tuple<A,B> {
        public A a;
        public B b;
        public Tuple(A a, B b) {
            this.a = a;
            this.b = b;
        }
        public static <C,D> Tuple<C,D> of(C c, D d) {
            return new Tuple<>(c, d);
        }
    }
    /**
     * Maps Java class literals of sub-types of IValue to the corresponding function which will
     * generate a (random) instance of a type that all instances of such Java classes could have.
     * Only composite types will actually be random.
     */
    private static final Map<Class<? extends IValue>, BiFunction<TypeStore, ExpectedType, Type>> types =
        Stream.<Tuple<Class<? extends IValue>, BiFunction<TypeStore, ExpectedType, Type>>>of(
            Tuple.of(IInteger.class, (ts, n) -> tf.integerType()),
            Tuple.of(IBool.class, (ts, n) -> tf.boolType()),
            Tuple.of(IReal.class, (ts, n) -> tf.realType()),
            Tuple.of(IRational.class, (ts, n) -> tf.rationalType()),
            Tuple.of(INumber.class, (ts, n) -> tf.numberType()),
            Tuple.of(IString.class, (ts, n) -> tf.stringType()),
            Tuple.of(ISourceLocation.class, (ts, n) -> tf.sourceLocationType()),
            Tuple.of(IValue.class, (ts, n) -> tf.valueType()),
            Tuple.of(INode.class, (ts, n) -> tf.nodeType()),
            Tuple.of(IList.class, (ts, n) -> tf.listType(tf.randomType())),
            Tuple.of(ISet.class, (ts, n) -> tf.setType(tf.randomType())),
            Tuple.of(ITuple.class, (ts, n) -> tf.tupleType(tf.randomType(), tf.randomType())),
            Tuple.of(IMap.class, (ts, n) -> tf.mapType(tf.randomType(), tf.randomType())),
            Tuple.of(IConstructor.class, (ts, n) -> randomADT(ts, n))
        ).collect(Collectors.toMap(t -> t.a, t -> t.b));
    @Override
    public Stream<? extends Arguments> provideArguments(ExtensionContext context) {
        // orElseThrow instead of a bare Optional.get(): fails with a clear
        // error if JUnit ever invokes this provider without a test method.
        Method method = context.getTestMethod().orElseThrow(IllegalStateException::new);
        /*
         * If only factories and typestores are arguments, we generate as many tests as we have
         * value factory implementations (2). For the IValue argument we generate 100 tests and for
         * every additional IValue argument we multiply the number of tests by 10.
         */
        long valueArity = Arrays.stream(method.getParameterTypes()).filter(x -> IValue.class.isAssignableFrom(x)).count();
        int numberOfTests = Math.max(1, 100 * (int) Math.pow(10, valueArity - 1));
        return Stream.of(
            Tuple.of(factories[0], generators[0]), // every factory has its own generator
            Tuple.of(factories[1], generators[1])
        ).flatMap(vf -> // all parameters share the same factory
            generateTypeStore(context).flatMap(ts ->
                Stream.iterate(arguments(method, vf, ts), p -> arguments(method, vf, ts)).limit(numberOfTests)
            )
        );
    }
    /**
     * Picks (or declares) an abstract data type. If an {@link ExpectedType} is
     * given, that exact ADT must exist in the store; otherwise a random one is
     * chosen, declaring a fresh "X" ADT when the store has none.
     */
    private static Type randomADT(TypeStore ts, ExpectedType n) {
        if (n != null) {
            Type adt = ts.lookupAbstractDataType(n.value());
            if (adt != null) {
                return adt;
            }
            else {
                throw new IllegalArgumentException(n.value() + " is not declared by the given TypeStore");
            }
        }
        Collection<Type> allADTs = ts.getAbstractDataTypes();
        if (!allADTs.isEmpty()) {
            // Use the shared RNG rather than allocating a fresh Random per
            // call (the old `new Random()` defeated seeding/reproducibility).
            return allADTs.stream().skip(rnd.nextInt(allADTs.size())).findFirst().get();
        }
        // note the side-effect in the type store!
        Type x = tf.abstractDataType(ts, "X");
        tf.constructor(ts, x, "x");
        return x;
    }
    /**
     * Generate the random argument for a single test method
     * @param method the declaration of the method under test
     * @param vf the valuefactory to use when generating values, also passed to parameters of type IValueFactory
     * @param ts the TypeStore to request ADTs from, randomly, also passed to parameters of type TypeStore
     * @return an Arguments instance for streaming into JUnits MethodSource interface.
     */
    private Arguments arguments(Method method, Tuple<IValueFactory, RandomValueGenerator> vf, TypeStore ts) {
        return Arguments.of(Arrays.stream(method.getParameters()).map(cl -> argument(vf, ts, cl.getType(), cl.getAnnotation(ExpectedType.class))).toArray());
    }
    /**
     * Generate an argument to a vallang test function. `cls` can be any sub-type of IValue,
     * or TypeStore or IValueFactory.
     * @param vf the valuefactory to use when generating values, also passed to parameters of type IValueFactory
     * @param ts the TypeStore to request ADTs from, randomly, also passed to parameters of type TypeStore
     * @param cls the class type of the parameter to generate an input for
     * @return a random object which is assignable to cls
     */
    private Object argument(Tuple<IValueFactory, RandomValueGenerator> vf, TypeStore ts, Class<?> cls, ExpectedType name) {
        if (cls.isAssignableFrom(IValueFactory.class)) {
            return vf.a;
        }
        else if (cls.isAssignableFrom(TypeStore.class)) {
            return ts;
        }
        else if (cls.isAssignableFrom(TypeFactory.class)) {
            return TypeFactory.getInstance();
        }
        else if (IValue.class.isAssignableFrom(cls)) {
            return generateValue(vf, ts, cls.asSubclass(IValue.class), name);
        }
        else {
            throw new IllegalArgumentException(cls + " is not assignable from IValue, IValueFactory, TypeStore or TypeFactory");
        }
    }
    /**
     * Generate a random IValue instance
     *
     * @param vf the valuefactory/randomgenerator to use
     * @param ts the TypeStore to draw ADT constructors from
     * @param cl the `cl` (sub-type of `IValue`) to be assignable to
     * @return an instance assignable to `cl`
     */
    private IValue generateValue(Tuple<IValueFactory, RandomValueGenerator> vf, TypeStore ts, Class<? extends IValue> cl, ExpectedType name) {
        Type expectedType = types.getOrDefault(cl, (x, n) -> tf.valueType()).apply(ts, name);
        Random rnd = vf.b.getRandom();
        // 1 in 4 chance of reusing the previous value (when type-compatible),
        // half of those re-parsed so it is equal but not reference-equal.
        if (previous != null && rnd.nextInt(4) == 0 && previous.getType().isSubtypeOf(expectedType)) {
            return rnd.nextBoolean() ? previous : reinstantiate(vf.a, ts, previous);
        }
        return (previous = vf.b.generate(expectedType, ts, Collections.emptyMap()));
    }
    /**
     * Produces a value which equals the input `val` but is not the same object reference.
     * It does this by serializing the value and parsing it again with the same expected type.
     * @return a value equals to `val` (val.equals(returnValue)) but not reference equal (val != returnValue)
     */
    private IValue reinstantiate(IValueFactory vf, TypeStore ts, IValue val) {
        try {
            return new StandardTextReader().read(vf, ts, val.getType(), new StringReader(val.toString()));
        } catch (FactTypeUseException | IOException e) {
            System.err.println("WARNING: value reinstantation via serialization failed. Reusing reference.");
            return val;
        }
    }
    /**
     * Generates a TypeStore instance by importing the static `store` field of the class-under-test (if-present)
     * in a fresh TypeStore. Otherwise it generates a fresh and empty TypeStore.
     * @param context the current JUnit extension context
     * @return a stream containing exactly one TypeStore
     */
    private Stream<TypeStore> generateTypeStore(ExtensionContext context) {
        try {
            // `store` is static, so Field.get ignores its argument; pass null
            // (the previous String literal "null" only worked by accident).
            return Stream.of(new TypeStore((TypeStore) context.getRequiredTestClass().getField("store").get(null)));
        } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) {
            return Stream.of(new TypeStore());
        }
    }
}
|
package net.imagej.ops.math;
import net.imagej.ops.AbstractNamespaceTest;
import net.imagej.ops.MathOps.Abs;
import net.imagej.ops.MathOps.Add;
import net.imagej.ops.MathOps.AddNoise;
import net.imagej.ops.MathOps.And;
import net.imagej.ops.MathOps.Arccos;
import net.imagej.ops.MathOps.Arccosh;
import net.imagej.ops.MathOps.Arccot;
import net.imagej.ops.MathOps.Arccoth;
import net.imagej.ops.MathOps.Arccsc;
import net.imagej.ops.MathOps.Arccsch;
import net.imagej.ops.MathOps.Arcsec;
import net.imagej.ops.MathOps.Arcsech;
import net.imagej.ops.MathOps.Arcsin;
import net.imagej.ops.MathOps.Arcsinh;
import net.imagej.ops.MathOps.Arctan;
import net.imagej.ops.MathOps.Arctanh;
import net.imagej.ops.MathOps.Ceil;
import org.junit.Test;
/**
 * Tests that the ops of the math namespace have corresponding type-safe Java
 * method signatures declared in the {@link MathNamespace} class.
 *
 * Each test delegates to {@code assertComplete}, which checks the named op
 * for coverage in the given namespace class.
 *
 * @author Curtis Rueden
 */
public class MathNamespaceTest extends AbstractNamespaceTest {
    /** Checks that the {@link Abs} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testAbs() {
        assertComplete("math", MathNamespace.class, Abs.NAME);
    }
    /** Checks that the {@link Add} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testAdd() {
        assertComplete("math", MathNamespace.class, Add.NAME);
    }
    /** Checks that the {@link AddNoise} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testAddNoise() {
        assertComplete("math", MathNamespace.class, AddNoise.NAME);
    }
    /** Checks that the {@link And} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testAnd() {
        assertComplete("math", MathNamespace.class, And.NAME);
    }
    /** Checks that the {@link Arccos} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArccos() {
        assertComplete("math", MathNamespace.class, Arccos.NAME);
    }
    /** Checks that the {@link Arccosh} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArccosh() {
        assertComplete("math", MathNamespace.class, Arccosh.NAME);
    }
    /** Checks that the {@link Arccot} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArccot() {
        assertComplete("math", MathNamespace.class, Arccot.NAME);
    }
    /** Checks that the {@link Arccoth} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArccoth() {
        assertComplete("math", MathNamespace.class, Arccoth.NAME);
    }
    /** Checks that the {@link Arccsc} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArccsc() {
        assertComplete("math", MathNamespace.class, Arccsc.NAME);
    }
    /** Checks that the {@link Arccsch} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArccsch() {
        assertComplete("math", MathNamespace.class, Arccsch.NAME);
    }
    /** Checks that the {@link Arcsec} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArcsec() {
        assertComplete("math", MathNamespace.class, Arcsec.NAME);
    }
    /** Checks that the {@link Arcsech} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArcsech() {
        assertComplete("math", MathNamespace.class, Arcsech.NAME);
    }
    /** Checks that the {@link Arcsin} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArcsin() {
        assertComplete("math", MathNamespace.class, Arcsin.NAME);
    }
    /** Checks that the {@link Arcsinh} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArcsinh() {
        assertComplete("math", MathNamespace.class, Arcsinh.NAME);
    }
    /** Checks that the {@link Arctan} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArctan() {
        assertComplete("math", MathNamespace.class, Arctan.NAME);
    }
    /** Checks that the {@link Arctanh} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testArctanh() {
        assertComplete("math", MathNamespace.class, Arctanh.NAME);
    }
    /** Checks that the {@link Ceil} op is covered by a {@link MathNamespace} method. */
    @Test
    public void testCeil() {
        assertComplete("math", MathNamespace.class, Ceil.NAME);
    }
}
|
package pro.cucumber;
import org.junit.Before;
import org.junit.Test;
import pro.cucumber.gitcli.GitCliRevisionProvider;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.regex.Pattern;
import static org.junit.Assert.assertTrue;
/**
 * Contract test: any {@code RevisionProvider} implementation must report the
 * current commit of a git working copy as a full sha1. Subclasses supply the
 * implementation via {@link #makeRevisionProvider(Path)}.
 */
public abstract class RevisionProviderContract {
    private Path rootPath;

    /**
     * Creates a throw-away git repository containing one committed file, to be
     * used as the fixture for revision lookup.
     */
    @Before
    public void createScm() throws IOException {
        rootPath = Files.createTempDirectory("GitWC");
        Path subfolder = rootPath.resolve("subfolder");
        Files.createDirectory(subfolder);
        Files.createFile(subfolder.resolve("file"));
        Exec.cmd("git init", rootPath);
        Exec.cmd("git add -A", rootPath);
        Exec.cmd("git commit -am \"files\"", rootPath);
        // Removed leftover debug listing (`ls -al`): it was platform-dependent
        // and only printed noise into the test output.
    }

    /** The provider must report the current commit as a 40-char lowercase sha1. */
    @Test
    public void findsRev() {
        String sha1Pattern = "^[a-f0-9]{40}$";
        RevisionProvider revisionProvider = makeRevisionProvider(rootPath);
        assertTrue("Expected a sha1", Pattern.matches(sha1Pattern, revisionProvider.getRev()));
    }

    /** Factory for the implementation under test, rooted at {@code rootPath}. */
    protected abstract RevisionProvider makeRevisionProvider(Path rootPath);
}
|
package reactor.core.test;
import java.io.IOException;
import java.util.Arrays;
import org.junit.Assert;
import org.junit.Test;
import reactor.core.publisher.Flux;
import reactor.fn.Consumer;
/**
* @author Anatoly Kadyshev
* @author Brian Clozel
* @author Sebastien Deleuze
*/
public class TestSubscriberTests {
@Test
public void assertSubscribed() {
TestSubscriber<String> ts = new TestSubscriber<>();
ts.assertNotSubscribed();
Flux.just("foo").subscribe(ts);
ts.assertSubscribed();
}
@Test
public void assertValues() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.<String>empty().subscribe(ts);
ts.assertNoValues();
ts = new TestSubscriber<>(0);
Flux.just("foo", "bar").subscribe(ts);
ts.assertNoValues();
ts.request(1);
ts.assertValueCount(1);
ts.assertValues("foo");
ts.request(1);
ts.assertValueCount(2);
ts.assertValues("foo", "bar");
}
@Test(expected = AssertionError.class)
public void assertValuesNotSameValue() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.just("foo").subscribe(ts);
ts.assertValues("bar");
}
@Test(expected = AssertionError.class)
public void assertValuesNotSameCount() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.just("foo", "foo").subscribe(ts);
ts.assertValues("foo");
}
@Test
public void assertValuesWith() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.just("foo", "bar").subscribe(ts);
ts.assertValuesWith(value -> value.equals("foo"), value -> value.equals("bar"));
}
@Test(expected = AssertionError.class)
public void assertValuesWithFailure() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.just("foo", "bar").subscribe(ts);
ts.assertValuesWith(value -> Assert.assertEquals("foo", value), value -> Assert.assertEquals("foo", value));
}
@Test
public void assertValueSequence() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.just("foo", "bar").subscribe(ts);
ts.assertValueSequence(Arrays.asList("foo", "bar"));
}
@Test(expected = AssertionError.class)
public void assertValueSequenceFailure() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.just("foo", "bar").subscribe(ts);
ts.assertValueSequence(Arrays.asList("foo", "foo"));
}
@Test
public void assertComplete() {
TestSubscriber<String> ts = new TestSubscriber<>(0);
Flux.just("foo").subscribe(ts);
ts.assertNotComplete();
ts.request(1);
ts.assertComplete();
}
@Test
public void assertError() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.just("foo").subscribe(ts);
ts.assertNoError();
ts = new TestSubscriber<>(0);
Flux.<String>error(new IllegalStateException()).subscribe(ts);
ts.assertError();
ts.assertError(IllegalStateException.class);
try {
ts.assertError(IOException.class);
}
catch (AssertionError e) {
Assert.assertNotNull(e);
}
catch(Throwable e) {
Assert.fail();
}
}
@Test
public void assertTerminated() {
TestSubscriber<String> ts = new TestSubscriber<>();
Flux.<String>error(new IllegalStateException()).subscribe(ts);
ts.assertTerminated();
ts = new TestSubscriber<>(0);
Flux.just("foo").subscribe(ts);
ts.assertNotTerminated();
ts.request(1);
ts.assertTerminated();
}
@Test
public void awaitAndAssertValues() {
TestSubscriber<String> ts = new TestSubscriber<>(1);
Flux.just("1", "2").log().subscribe(ts);
ts.awaitAndAssertValues("1");
ts.request(1);
ts.awaitAndAssertValues("2");
}
@Test
public void awaitAndAssertValuesWith() {
    // Every emitted value (1 and 2) must satisfy the consumer.
    // A lambda replaces the verbose anonymous Consumer; the file
    // already uses lambda syntax elsewhere.
    TestSubscriber<Long> ts = new TestSubscriber<>(1);
    Consumer<Long> greaterThanZero = aLong -> Assert.assertTrue(aLong > 0L);
    Flux.just(1L, 2L).log().subscribe(ts);
    ts.awaitAndAssertValuesWith(greaterThanZero);
    ts.request(1);
    ts.awaitAndAssertValuesWith(greaterThanZero);
}
@Test(expected = AssertionError.class)
public void awaitAndAssertValuesWithFailure() {
    // The second value (20) violates the predicate, so one of the
    // awaitAndAssertValuesWith calls must raise an AssertionError.
    // A lambda replaces the verbose anonymous Consumer; the file
    // already uses lambda syntax elsewhere.
    TestSubscriber<Long> ts = new TestSubscriber<>(1);
    Flux.just(1L, 20L).log().subscribe(ts);
    Consumer<Long> lowerThanTen = aLong -> Assert.assertTrue(aLong < 10L);
    ts.awaitAndAssertValuesWith(lowerThanTen);
    ts.request(1);
    ts.awaitAndAssertValuesWith(lowerThanTen);
}
@Test
public void awaitAndAssertValueCount() {
    // One value is expected per unit of demand: one after subscribing
    // with demand 1, and one more after the extra request.
    TestSubscriber<Long> subscriber = new TestSubscriber<>(1);
    Flux.just(1L, 2L).log().subscribe(subscriber);
    subscriber.awaitAndAssertValueCount(1);
    subscriber.request(1);
    subscriber.awaitAndAssertValueCount(1);
}
@Test(expected = AssertionError.class)
public void awaitAndAssertValueCountFailure() {
    // Only one value is ever emitted, so waiting for two must hit the
    // 1-second timeout and fail.
    TestSubscriber<Long> ts = new TestSubscriber<>();
    Flux.just(1L).log().subscribe(ts);
    ts.configureValuesTimeout(1).awaitAndAssertValueCount(2);
}
}
|
package roart.service;
import roart.service.ServiceParam.Function;
import roart.util.EurekaConstants;
import roart.util.EurekaUtil;
import javax.servlet.http.*;
import java.util.Vector;
import java.util.Enumeration;
import java.util.ArrayList;
import java.util.List;
import java.io.*;
import roart.common.searchengine.SearchEngineSearchParam;
import roart.common.searchengine.SearchEngineSearchResult;
import roart.config.ConfigConstants;
import roart.config.MyConfig;
import roart.config.NodeConfig;
import roart.model.ResultItem;
import roart.model.SearchDisplay;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.vaadin.ui.UI;
/**
 * Delegates search requests to the remote search-engine service that is
 * discovered through Eureka. The node configuration is attached to every
 * outgoing request.
 */
public class SearchService {

    // Loggers are stateless; hold a single static instance per class.
    private static final Logger log = LoggerFactory.getLogger(SearchService.class);

    /**
     * Runs a plain text search on the remote search engine.
     * The remote result is currently discarded; callers only trigger the search.
     *
     * @param str  the query string
     * @param type the document type filter (not yet forwarded, see TODO)
     */
    public void searchme(String str, String type) {
        SearchEngineSearchParam param = new SearchEngineSearchParam();
        param.conf = getConfig();
        param.str = str;
        // TODO fix: the type filter is not forwarded to the remote service yet.
        //param.type = type;
        EurekaUtil.sendMe(SearchEngineSearchResult.class, param, getAppName(), EurekaConstants.SEARCH);
    }

    // TODO fix: display flags are hard-coded instead of being derived from config.
    /**
     * Builds the display configuration used by the search UI.
     *
     * @param ui the owning Vaadin UI (currently unused)
     * @return a display configuration with all features enabled
     */
    public static SearchDisplay getSearchDisplay(UI ui) {
        SearchDisplay display = new SearchDisplay();
        display.highlightmlt = true;
        display.classify = true;
        display.admindisplay = true;
        return display;
    }

    /** @return whether "more like this" highlighting is enabled in the node config */
    public static boolean isHighlightMLT() {
        return MyConfig.conf.highlightmlt;
    }

    /**
     * Runs a "more like this" similarity search for the document with the
     * given MD5 checksum. The remote result is currently discarded.
     *
     * @param md5 checksum identifying the source document
     */
    public void searchsimilar(String md5) {
        SearchEngineSearchParam param = new SearchEngineSearchParam();
        param.conf = getConfig();
        param.str = md5;
        EurekaUtil.sendMe(SearchEngineSearchResult.class, param, getAppName(), EurekaConstants.SEARCHMLT);
    }

    /** @return the Eureka application name of the search-engine service */
    private String getAppName() {
        return EurekaConstants.AETHER;
    }

    /** @return the active node configuration sent along with every request */
    private NodeConfig getConfig() {
        return MyConfig.conf;
    }
}
|
package seedu.taskboss.logic;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static seedu.taskboss.commons.core.Messages.MESSAGE_INVALID_COMMAND_FORMAT;
import static seedu.taskboss.commons.core.Messages.MESSAGE_INVALID_TASK_DISPLAYED_INDEX;
import static seedu.taskboss.commons.core.Messages.MESSAGE_UNKNOWN_COMMAND;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import com.google.common.eventbus.Subscribe;
import seedu.taskboss.commons.core.EventsCenter;
import seedu.taskboss.commons.events.model.TaskBossChangedEvent;
import seedu.taskboss.commons.events.ui.JumpToListRequestEvent;
import seedu.taskboss.commons.events.ui.ShowHelpRequestEvent;
import seedu.taskboss.commons.exceptions.IllegalValueException;
import seedu.taskboss.logic.commands.AddCommand;
import seedu.taskboss.logic.commands.ClearCommand;
import seedu.taskboss.logic.commands.Command;
import seedu.taskboss.logic.commands.CommandResult;
import seedu.taskboss.logic.commands.DeleteCommand;
import seedu.taskboss.logic.commands.ExitCommand;
import seedu.taskboss.logic.commands.FindCommand;
import seedu.taskboss.logic.commands.HelpCommand;
import seedu.taskboss.logic.commands.ListCommand;
import seedu.taskboss.logic.commands.SelectCommand;
import seedu.taskboss.logic.commands.exceptions.CommandException;
import seedu.taskboss.logic.commands.exceptions.InvalidDatesException;
import seedu.taskboss.logic.parser.DateTimeParser;
import seedu.taskboss.model.Model;
import seedu.taskboss.model.ModelManager;
import seedu.taskboss.model.ReadOnlyTaskBoss;
import seedu.taskboss.model.TaskBoss;
import seedu.taskboss.model.category.Category;
import seedu.taskboss.model.category.UniqueCategoryList;
import seedu.taskboss.model.task.DateTime;
import seedu.taskboss.model.task.Information;
import seedu.taskboss.model.task.Name;
import seedu.taskboss.model.task.PriorityLevel;
import seedu.taskboss.model.task.ReadOnlyTask;
import seedu.taskboss.model.task.Task;
import seedu.taskboss.storage.StorageManager;
/**
 * Integration-style tests for the logic component: each test feeds a raw
 * command string through {@code logic.execute(...)} and verifies the
 * feedback message, the resulting model state, the persisted TaskBoss data
 * and the UI events that were raised.
 */
public class LogicManagerTest {
    // Provides a throw-away directory for the temporary storage files.
    @Rule
    public TemporaryFolder saveFolder = new TemporaryFolder();

    private Model model;
    private Logic logic;

    // These are for checking the correctness of the events raised
    private ReadOnlyTaskBoss latestSavedTaskBoss;
    private boolean helpShown;
    private int targetedJumpIndex;

    // Records every save so tests can compare persisted vs in-memory data.
    @Subscribe
    private void handleLocalModelChangedEvent(TaskBossChangedEvent abce) {
        latestSavedTaskBoss = new TaskBoss(abce.data);
    }

    @Subscribe
    private void handleShowHelpRequestEvent(ShowHelpRequestEvent she) {
        helpShown = true;
    }

    @Subscribe
    private void handleJumpToListRequestEvent(JumpToListRequestEvent je) {
        targetedJumpIndex = je.targetIndex;
    }

    @Before
    public void setUp() {
        model = new ModelManager();
        String tempTaskBossFile = saveFolder.getRoot().getPath() + "TempTaskBoss.xml";
        String tempPreferencesFile = saveFolder.getRoot().getPath() + "TempPreferences.json";
        logic = new LogicManager(model, new StorageManager(tempTaskBossFile, tempPreferencesFile));
        EventsCenter.getInstance().registerHandler(this);
        latestSavedTaskBoss = new TaskBoss(model.getTaskBoss()); // last saved
        // assumed to be up to date
        helpShown = false;
        targetedJumpIndex = -1; // none yet
    }

    @After
    public void tearDown() {
        EventsCenter.clearSubscribers();
    }

    @Test
    public void execute_invalid() throws IllegalValueException, InvalidDatesException {
        String invalidCommand = "       ";
        assertCommandFailure(invalidCommand, String.format(MESSAGE_INVALID_COMMAND_FORMAT,
                HelpCommand.MESSAGE_USAGE));
    }

    /**
     * Executes the command and asserts the feedback message, the resulting
     * TaskBoss and the shown task list are as expected (no exception).
     */
    private void assertCommandSuccess(String inputCommand, String expectedMessage,
            ReadOnlyTaskBoss expectedTaskBoss,
            List<? extends ReadOnlyTask> expectedShownList) throws IllegalValueException, InvalidDatesException {
        assertCommandBehavior(false, inputCommand, expectedMessage, expectedTaskBoss, expectedShownList);
    }

    /**
     * Executes the command expecting a CommandException with the given
     * message; model state and shown list must stay unchanged.
     */
    private void assertCommandFailure(String inputCommand, String expectedMessage) throws IllegalValueException,
            InvalidDatesException {
        TaskBoss expectedTaskBoss = new TaskBoss(model.getTaskBoss());
        List<ReadOnlyTask> expectedShownList = new ArrayList<>(model.getFilteredTaskList());
        assertCommandBehavior(true, inputCommand, expectedMessage, expectedTaskBoss, expectedShownList);
    }

    /**
     * Runs the command and checks feedback (or exception message), the
     * filtered list, the in-memory TaskBoss and the last saved TaskBoss.
     */
    private void assertCommandBehavior(boolean isCommandExceptionExpected, String inputCommand,
            String expectedMessage, ReadOnlyTaskBoss expectedTaskBoss,
            List<? extends ReadOnlyTask> expectedShownList) throws IllegalValueException, InvalidDatesException {
        try {
            CommandResult result = logic.execute(inputCommand);
            assertFalse("CommandException expected but was not thrown.", isCommandExceptionExpected);
            assertEquals(expectedMessage, result.feedbackToUser);
        } catch (CommandException e) {
            assertTrue("CommandException not expected but was thrown.", isCommandExceptionExpected);
            assertEquals(expectedMessage, e.getMessage());
        }
        // Confirm the ui display elements should contain the right data
        assertEquals(expectedShownList, model.getFilteredTaskList());
        // Confirm the state of data (saved and in-memory) is as expected
        assertEquals(expectedTaskBoss, model.getTaskBoss());
        assertEquals(expectedTaskBoss, latestSavedTaskBoss);
    }

    @Test
    public void execute_unknownCommandWord() throws IllegalValueException, InvalidDatesException {
        String unknownCommand = "uicfhmowqewca";
        assertCommandFailure(unknownCommand, MESSAGE_UNKNOWN_COMMAND);
    }

    @Test
    public void execute_help() throws IllegalValueException, InvalidDatesException {
        assertCommandSuccess("help", HelpCommand.SHOWING_HELP_MESSAGE,
                new TaskBoss(), Collections.emptyList());
        assertTrue(helpShown);
    }

    @Test
    public void execute_helpShortCommand() throws IllegalValueException, InvalidDatesException {
        assertCommandSuccess("h", HelpCommand.SHOWING_HELP_MESSAGE,
                new TaskBoss(), Collections.emptyList());
        assertTrue(helpShown);
    }

    @Test
    public void execute_exit() throws IllegalValueException, InvalidDatesException {
        assertCommandSuccess("exit", ExitCommand.MESSAGE_EXIT_ACKNOWLEDGEMENT,
                new TaskBoss(), Collections.emptyList());
    }

    @Test
    public void execute_exitShortCommand() throws IllegalValueException, InvalidDatesException {
        assertCommandSuccess("x", ExitCommand.MESSAGE_EXIT_ACKNOWLEDGEMENT,
                new TaskBoss(), Collections.emptyList());
    }

    @Test
    public void execute_clear() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        model.addTask(helper.generateTask(1));
        model.addTask(helper.generateTask(2));
        model.addTask(helper.generateTask(3));
        assertCommandSuccess("clear", ClearCommand.MESSAGE_SUCCESS, new TaskBoss(), Collections.emptyList());
    }

    @Test
    public void execute_add_invalidArgsFormat() throws IllegalValueException, InvalidDatesException {
        String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, AddCommand.MESSAGE_USAGE);
        assertCommandFailure("add Valid Name p/Yes sd/today ed/tomorrow", expectedMessage);
    }

    @Test
    public void execute_add_invalidTaskData() throws IllegalValueException, InvalidDatesException {
        assertCommandFailure("add n/[]\\[;] sd/today ed/tomorrow i/valid, information",
                Name.MESSAGE_NAME_CONSTRAINTS);
        assertCommandFailure("add n/Valid Name! sd/today ed/tomorrow "
                + "i/valid, information c/invalid_-[.category",
                Category.MESSAGE_CATEGORY_CONSTRAINTS);
        assertCommandFailure("add n/Valid Name sd/today to next week ed/tomorrow i/valid, information",
                DateTimeParser.getMultipleDatesError());
        assertCommandFailure("add n/Valid Name sd/invalid date ed/tomorroq i/valid, information",
                DateTime.MESSAGE_DATE_CONSTRAINTS);
    }

    @Test
    public void execute_add_successful() throws Exception {
        // setup expectations
        TestDataHelper helper = new TestDataHelper();
        Task toBeAdded = helper.adam();
        TaskBoss expectedAB = new TaskBoss();
        expectedAB.addTask(toBeAdded);
        // execute command and verify result
        assertCommandSuccess(helper.generateAddCommand(toBeAdded),
                String.format(AddCommand.MESSAGE_SUCCESS, toBeAdded),
                expectedAB, expectedAB.getTaskList());
    }

    @Test
    public void execute_addDuplicate_notAllowed() throws Exception {
        // setup expectations
        TestDataHelper helper = new TestDataHelper();
        Task toBeAdded = helper.adam();
        // setup starting state
        model.addTask(toBeAdded); // task already in internal TaskBoss
        // execute command and verify result
        assertCommandFailure(helper.generateAddCommand(toBeAdded), AddCommand.MESSAGE_DUPLICATE_TASK);
    }

    @Test
    public void execute_list_showsAllTasks() throws Exception {
        // prepare expectations
        TestDataHelper helper = new TestDataHelper();
        TaskBoss expectedTB = helper.generateTaskBoss(2);
        List<? extends ReadOnlyTask> expectedList = expectedTB.getTaskList();
        // prepare TaskBoss state
        helper.addToModel(model, 2);
        assertCommandSuccess("list", ListCommand.MESSAGE_SUCCESS, expectedTB, expectedList);
    }

    @Test
    public void execute_listShortCommand_showsAllTasks() throws Exception {
        // prepare expectations
        TestDataHelper helper = new TestDataHelper();
        TaskBoss expectedTB = helper.generateTaskBoss(2);
        List<? extends ReadOnlyTask> expectedList = expectedTB.getTaskList();
        // prepare TaskBoss state
        helper.addToModel(model, 2);
        assertCommandSuccess("l", ListCommand.MESSAGE_SUCCESS, expectedTB, expectedList);
    }

    /**
     * Confirms the 'invalid argument index number behaviour' for the given
     * command targeting a single task in the shown list, using visible index.
     *
     * @param commandWord to test assuming it targets a single task in the last shown
     *                    list based on visible index.
     */
    private void assertIncorrectIndexFormatBehaviorForCommand(String commandWord, String expectedMessage)
            throws Exception {
        assertCommandFailure(commandWord, expectedMessage); // index missing
        assertCommandFailure(commandWord + " +1", expectedMessage); // index should be unsigned
        assertCommandFailure(commandWord + " -1", expectedMessage); // index should be unsigned
        assertCommandFailure(commandWord + " 0", expectedMessage); // index cannot be 0
        assertCommandFailure(commandWord + " not_a_number", expectedMessage);
    }

    /**
     * Confirms the 'index not found in list' behaviour for the given command:
     * a syntactically valid index that is larger than the shown list must be
     * rejected.
     *
     * @param commandWord
     *            to test assuming it targets a single task in the last shown
     *            list based on visible index.
     */
    private void assertIndexNotFoundBehaviorForCommand(String commandWord) throws Exception {
        String expectedMessage = MESSAGE_INVALID_TASK_DISPLAYED_INDEX;
        TestDataHelper helper = new TestDataHelper();
        List<Task> taskList = helper.generateTaskList(2);
        // set AB state to 2 tasks
        model.resetData(new TaskBoss());
        for (Task p : taskList) {
            model.addTask(p);
        }
        assertCommandFailure(commandWord + " 3", expectedMessage);
    }

    @Test
    public void execute_selectInvalidArgsFormat_errorMessageShown() throws Exception {
        String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, SelectCommand.MESSAGE_USAGE);
        assertIncorrectIndexFormatBehaviorForCommand("select", expectedMessage);
    }

    @Test
    public void execute_selectIndexNotFound_errorMessageShown() throws Exception {
        assertIndexNotFoundBehaviorForCommand("select");
    }

    @Test
    public void execute_select_jumpsToCorrectTask() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        List<Task> threeTasks = helper.generateTaskList(3);
        TaskBoss expectedAB = helper.generateTaskBoss(threeTasks);
        helper.addToModel(model, threeTasks);
        assertCommandSuccess("select 2", String.format(SelectCommand.MESSAGE_SELECT_TASK_SUCCESS, 2), expectedAB,
                expectedAB.getTaskList());
        // visible index 2 maps to internal (zero-based) index 1
        assertEquals(1, targetedJumpIndex);
        assertEquals(model.getFilteredTaskList().get(1), threeTasks.get(1));
    }

    @Test
    public void execute_deleteInvalidArgsFormat_errorMessageShown() throws Exception {
        String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, DeleteCommand.MESSAGE_USAGE);
        assertIncorrectIndexFormatBehaviorForCommand("delete", expectedMessage);
    }

    @Test
    public void execute_deleteIndexNotFound_errorMessageShown() throws Exception {
        assertIndexNotFoundBehaviorForCommand("delete");
    }

    @Test
    public void execute_delete_removesCorrectTask() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        List<Task> threeTasks = helper.generateTaskList(3);
        TaskBoss expectedAB = helper.generateTaskBoss(threeTasks);
        expectedAB.removeTask(threeTasks.get(1));
        helper.addToModel(model, threeTasks);
        assertCommandSuccess("delete 2", String.format(DeleteCommand.MESSAGE_DELETE_TASK_SUCCESS,
                threeTasks.get(1)),
                expectedAB, expectedAB.getTaskList());
    }

    @Test
    public void execute_find_invalidArgsFormat() throws IllegalValueException, InvalidDatesException {
        String expectedMessage = String.format(MESSAGE_INVALID_COMMAND_FORMAT, FindCommand.MESSAGE_USAGE);
        assertCommandFailure("find ", expectedMessage);
    }

    @Test
    public void execute_findName_onlyMatchesFullWordsInNames() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task pTarget1 = helper.generateTaskWithName("bla bla KEY bla");
        Task pTarget2 = helper.generateTaskWithName("bla KEY bla bceofeia");
        Task p1 = helper.generateTaskWithName("KE Y");
        Task p2 = helper.generateTaskWithName("KEYKEYKEY sduauo");
        List<Task> fourTasks = helper.generateTaskList(p1, pTarget1, p2, pTarget2);
        TaskBoss expectedAB = helper.generateTaskBoss(fourTasks);
        List<Task> expectedList = helper.generateTaskList(pTarget1, pTarget2);
        helper.addToModel(model, fourTasks);
        assertCommandSuccess("find n/KEY",
                Command.getMessageForTaskListShownSummary(expectedList.size()), expectedAB, expectedList);
    }

    @Test
    public void execute_findName_isNotCaseSensitive() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task p1 = helper.generateTaskWithName("bla bla KEY bla");
        Task p2 = helper.generateTaskWithName("bla KEY bla bceofeia");
        Task p3 = helper.generateTaskWithName("key key");
        Task p4 = helper.generateTaskWithName("KEy sduauo");
        List<Task> fourTasks = helper.generateTaskList(p3, p1, p4, p2);
        TaskBoss expectedAB = helper.generateTaskBoss(fourTasks);
        List<Task> expectedList = fourTasks;
        helper.addToModel(model, fourTasks);
        assertCommandSuccess("find n/KEY",
                Command.getMessageForTaskListShownSummary(expectedList.size()), expectedAB, expectedList);
    }

    @Test
    public void execute_findName_matchesIfAnyKeywordPresent() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task pTarget1 = helper.generateTaskWithName("bla bla KEY bla");
        Task pTarget2 = helper.generateTaskWithName("bla rAnDoM bla bceofeia");
        Task pTarget3 = helper.generateTaskWithName("key key");
        Task p1 = helper.generateTaskWithName("sduauo");
        List<Task> fourTasks = helper.generateTaskList(pTarget1, p1, pTarget2, pTarget3);
        TaskBoss expectedAB = helper.generateTaskBoss(fourTasks);
        List<Task> expectedList = helper.generateTaskList(pTarget1, pTarget2, pTarget3);
        helper.addToModel(model, fourTasks);
        assertCommandSuccess("find n/key rAnDoM", Command.getMessageForTaskListShownSummary(expectedList.size()),
                expectedAB, expectedList);
    }

    @Test
    public void execute_findStartDatetime_matchesOnlyIfKeywordPresentInOrder() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task pTarget1 = helper.generateTaskWithStartDateTime("Monday, 13 March, 2017");
        Task p1 = helper.generateTaskWithStartDateTime("16 March, 2017");
        Task p2 = helper.generateTaskWithStartDateTime("Monday, 1 May, 2017");
        Task p3 = helper.generateTaskWithStartDateTime("2 July, 2017");
        List<Task> fourTasks = helper.generateTaskList(pTarget1, p1, p2, p3);
        TaskBoss expectedTB = helper.generateTaskBoss(fourTasks);
        List<Task> expectedList = helper.generateTaskList(pTarget1, p1);
        helper.addToModel(model, fourTasks);
        assertCommandSuccess("find sd/Mar", Command.getMessageForTaskListShownSummary(expectedList.size()),
                expectedTB, expectedList);
    }

    @Test
    public void execute_findEndDatetime_matchesOnlyIfKeywordPresentInOrder() throws Exception {
        TestDataHelper helper = new TestDataHelper();
        Task pTarget1 = helper.generateTaskWithEndDateTime("Monday, 13 March, 2017");
        Task p1 = helper.generateTaskWithEndDateTime("16 March, 2017");
        Task p2 = helper.generateTaskWithEndDateTime("Monday, 1 May, 2017");
        Task p3 = helper.generateTaskWithEndDateTime("2 July, 2017");
        List<Task> fourTasks = helper.generateTaskList(pTarget1, p1, p2, p3);
        TaskBoss expectedAB = helper.generateTaskBoss(fourTasks);
        List<Task> expectedList = helper.generateTaskList(pTarget1, p1);
        helper.addToModel(model, fourTasks);
        assertCommandSuccess("find ed/Mar", Command.getMessageForTaskListShownSummary(expectedList.size()),
                expectedAB, expectedList);
    }

    /**
     * A utility class to generate test data.
     */
    class TestDataHelper {
        /** Returns one fixed, fully-populated sample task. */
        Task adam() throws Exception {
            Name name = new Name("Adam Brown");
            PriorityLevel privatePriorityLevel = new PriorityLevel("Yes");
            DateTime startDateTime = new DateTime("today 5pm");
            DateTime endDateTime = new DateTime("tomorrow 8pm");
            Information privateInformation = new Information("111, alpha street");
            Category category1 = new Category("category1");
            Category category2 = new Category("longercategory2");
            UniqueCategoryList categories = new UniqueCategoryList(category1, category2);
            return new Task(name, privatePriorityLevel, startDateTime,
                    endDateTime, privateInformation, categories);
        }

        /**
         * Generates a valid task using the given seed. Running this function
         * with the same parameter values guarantees the returned task will have
         * the same state. Each unique seed will generate a unique Task object.
         *
         * @param seed
         *            used to generate the task data field values
         */
        Task generateTask(int seed) throws Exception {
            return new Task(
                    new Name("Task " + seed),
                    new PriorityLevel("Yes"),
                    new DateTime("Feb 19 10am 2017"),
                    new DateTime("Feb 20 10am 2017"),
                    new Information("House of " + seed),
                    new UniqueCategoryList(new Category("category" + Math.abs(seed)),
                            new Category("category" + Math.abs(seed + 1)))
            );
        }

        /** Builds the raw "add" command string that recreates the given task. */
        private String generateAddCommand(Task p) throws IllegalValueException {
            StringBuffer cmd = new StringBuffer();
            cmd.append("add ");
            //@@author A0144904H
            // A trailing "!" on the name marks the task as high priority.
            if (p.getPriorityLevel().equals(PriorityLevel.PRIORITY_NO)) {
                cmd.append(" n/").append(p.getName().toString());
            } else {
                cmd.append(" n/").append(p.getName().toString() + "!");
            }
            cmd.append(" sd/").append(p.getStartDateTime().toString());
            cmd.append(" ed/").append(p.getEndDateTime().toString());
            cmd.append(" i/").append(p.getInformation());
            UniqueCategoryList categories = p.getCategories();
            for (Category t : categories) {
                cmd.append(" c/").append(t.categoryName);
            }
            return cmd.toString();
        }

        /**
         * Generates an TaskBoss with auto-generated tasks.
         */
        TaskBoss generateTaskBoss(int numGenerated) throws Exception {
            TaskBoss taskBoss = new TaskBoss();
            addToTaskBoss(taskBoss, numGenerated);
            return taskBoss;
        }

        /**
         * Generates TaskBoss based on the list of Tasks given.
         */
        TaskBoss generateTaskBoss(List<Task> tasks) throws Exception {
            TaskBoss taskBoss = new TaskBoss();
            addToTaskBoss(taskBoss, tasks);
            return taskBoss;
        }

        /**
         * Adds auto-generated Task objects to the given TaskBoss
         *
         * @param taskBoss
         *            The TaskBoss to which the Tasks will be added
         */
        void addToTaskBoss(TaskBoss taskBoss, int numGenerated) throws Exception {
            addToTaskBoss(taskBoss, generateTaskList(numGenerated));
        }

        /**
         * Adds the given list of Tasks to the given TaskBoss
         */
        void addToTaskBoss(TaskBoss taskBoss, List<Task> tasksToAdd) throws Exception {
            for (Task t : tasksToAdd) {
                taskBoss.addTask(t);
            }
        }

        /**
         * Adds auto-generated Task objects to the given model
         *
         * @param model
         *            The model to which the Tasks will be added
         */
        void addToModel(Model model, int numGenerated) throws Exception {
            addToModel(model, generateTaskList(numGenerated));
        }

        /**
         * Adds the given list of Tasks to the given model
         */
        void addToModel(Model model, List<Task> tasksToAdd) throws Exception {
            for (Task t : tasksToAdd) {
                model.addTask(t);
            }
        }

        /**
         * Generates a list of Tasks based on the flags.
         */
        List<Task> generateTaskList(int numGenerated) throws Exception {
            List<Task> tasks = new ArrayList<>();
            for (int i = 1; i <= numGenerated; i++) {
                tasks.add(generateTask(i));
            }
            return tasks;
        }

        List<Task> generateTaskList(Task... tasks) {
            return Arrays.asList(tasks);
        }

        /**
         * Generates a Task object with given name. Other fields will have some
         * dummy values.
         */
        Task generateTaskWithName(String name) throws Exception {
            return new Task(
                    new Name(name),
                    new PriorityLevel("Yes"),
                    new DateTime("Feb 19 10am 2017"),
                    new DateTime("Feb 20 10am 2017"),
                    new Information("House of 1"),
                    new UniqueCategoryList(new Category("category"))
            );
        }

        /**
         * Generates a Task object with given startDatetime. Other fields will have some
         * dummy values.
         */
        Task generateTaskWithStartDateTime(String startDatetime) throws Exception {
            return new Task(
                    new Name("testTask"),
                    new PriorityLevel("Yes"),
                    new DateTime(startDatetime),
                    new DateTime("Feb 20 10am 2018"),
                    new Information("House of 1"),
                    new UniqueCategoryList(new Category("category"))
            );
        }

        /**
         * Generates a Task object with given endDatetime. Other fields will have some
         * dummy values.
         */
        Task generateTaskWithEndDateTime(String endDatetime) throws Exception {
            return new Task(
                    new Name("testTask"),
                    new PriorityLevel("Yes"),
                    new DateTime("Feb 20 10am 2017"),
                    new DateTime(endDatetime),
                    new Information("House of 1"),
                    new UniqueCategoryList(new Category("category"))
            );
        }
    }
}
|
package org.jdesktop.swingx;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.util.ArrayList;
import java.util.List;
import javax.swing.AbstractAction;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JPanel;
import org.jdesktop.swingx.JXHeader;
/**
 * Visual (interactive) tests of JXBusyLabel issues; these open real frames
 * and dialogs and are meant to be inspected by a human, not run headless.
 * @author had
 *
 */
public class JXBusyLabelVisualCheck extends InteractiveTestCase {

    /**
     * Creates and shows a throw-away dialog containing a single spinning
     * JXBusyLabel; used by the memory-leak check below.
     */
    private static JDialog createDialog(JFrame owner) {
        JDialog d = new JDialog(owner, "Dialog");
        d.setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE);
        JXBusyLabel bl = new JXBusyLabel();
        d.add(bl);
        bl.setBusy(true);
        d.setVisible(true);
        return d;
    }

    /**
     * Test for issue #795 - size is set to 0 using default const.
     * The button toggles the busy state so the animation can be inspected.
     */
    public void interactiveNoArgConst() {
        JFrame f = new JFrame();
        f.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
        final JXBusyLabel label = new JXBusyLabel();
        label.setText("hi there");
        f.add(label);
        f.add(new JButton(new AbstractAction("click me") {
            public void actionPerformed(ActionEvent e) {
                label.setBusy(!label.isBusy());
            }
        }), BorderLayout.SOUTH);
        f.pack();
        f.setVisible(true);
    }

    /**
     * Test for memory leak issue #626.
     * Memory usage is printed to System.out before and after the 100 dialogs
     * are created/disposed so a leak shows up as a growing delta.
     */
    public void interactiveMemoryLeak() {
        final JFrame f = new JFrame();
        JXHeader header = new JXHeader();
        header.setTitle("<html><B>JXBusyLabelIssue #626</b>");
        header.setDescription("<html>Memory leak test. By clicking 'start' button, creation of 100 "
                + "new dialogs will be triggered. Similarly 'Stop' button will then destroy all "
                + "created dialogs. It should be possible to see memory consumption on the "
                + "System.out as well as change in speed of busy label rotation when all dialogs "
                + "are created/destroyed.");
        f.add(header, BorderLayout.NORTH);
        JXBusyLabel bl = new JXBusyLabel();
        f.add(bl);
        bl.setBusy(true);
        final List<JDialog> l = new ArrayList<JDialog>();
        JPanel control = new JPanel();
        JButton b = new JButton(new AbstractAction("Start") {
            public void actionPerformed(ActionEvent e) {
                System.gc();
                System.out.println("Start mem(u):" + (Runtime.getRuntime().totalMemory() -
                        Runtime.getRuntime().freeMemory()));
                // just reset the model
                for (int i = 0; i < 100; i++) {
                    l.add(createDialog(f));
                }
                System.gc();
                System.out.println("alloc mem(u):" + (Runtime.getRuntime().totalMemory() -
                        Runtime.getRuntime().freeMemory()));
            }
        });
        control.add(b);
        b = new JButton(new AbstractAction("Stop") {
            public void actionPerformed(ActionEvent e) {
                // just reset the model
                for (int i = 0; i < 100; i++) {
                    if (l.size() > 0) {
                        JDialog d = l.remove(0);
                        d.dispose();
                    }
                }
                System.gc();
                System.out.println("Stop mem(u):" + (Runtime.getRuntime().totalMemory() -
                        Runtime.getRuntime().freeMemory()));
            }
        });
        control.add(b);
        f.add(control, BorderLayout.SOUTH);
        f.setPreferredSize(new Dimension(400, 400));
        f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        f.pack();
        f.setVisible(true);
    }

    /**
     * @param args
     */
    public static void main(String[] args) {
        JXBusyLabelVisualCheck test = new JXBusyLabelVisualCheck();
        try {
            test.runInteractiveTests();
        } catch (Exception e) {
            System.err.println("exception when executing interactive tests:");
            e.printStackTrace();
        }
    }

    /**
     * do nothing test - keep the testrunner happy.
     */
    public void testDummy() {
    }
}
|
package edu.wustl.cab2b.server.queryengine;
import edu.wustl.cab2b.common.queryengine.Cab2bQuery;
import edu.wustl.cab2b.common.queryengine.ICab2bParameterizedQuery;
import edu.wustl.common.querysuite.bizlogic.QueryBizLogic;
/**
* This class processes the Cab2bQuery object before persisting and after retreival.
* @author chetan_patil
*/
public class Cab2bQueryBizLogic extends QueryBizLogic<ICab2bParameterizedQuery> {
/**
* @see edu.wustl.common.querysuite.queryobject.util.QueryProcessor#getQueryClassName()
*/
protected String getQueryClassName() {
return Cab2bQuery.class.getName();
}
}
|
package net.sf.j2s.core.astvisitors;
import java.util.Iterator;
import java.util.List;
import org.eclipse.jdt.core.dom.ASTNode;
import org.eclipse.jdt.core.dom.AnonymousClassDeclaration;
import org.eclipse.jdt.core.dom.Block;
import org.eclipse.jdt.core.dom.ClassInstanceCreation;
import org.eclipse.jdt.core.dom.Expression;
import org.eclipse.jdt.core.dom.ExpressionStatement;
import org.eclipse.jdt.core.dom.IBinding;
import org.eclipse.jdt.core.dom.IMethodBinding;
import org.eclipse.jdt.core.dom.ITypeBinding;
import org.eclipse.jdt.core.dom.IVariableBinding;
import org.eclipse.jdt.core.dom.IfStatement;
import org.eclipse.jdt.core.dom.MethodDeclaration;
import org.eclipse.jdt.core.dom.MethodInvocation;
import org.eclipse.jdt.core.dom.Name;
import org.eclipse.jdt.core.dom.PrefixExpression;
import org.eclipse.jdt.core.dom.QualifiedName;
import org.eclipse.jdt.core.dom.SimpleName;
import org.eclipse.jdt.core.dom.Statement;
import org.eclipse.jdt.core.dom.WhileStatement;
public class SWTScriptVisitor extends ASTScriptVisitor {
/**
 * Marks whether the statement currently being translated comes before or
 * after SWT's event-dispatch while-loop, i.e. the pattern:
 * <code>
 * ...
 * shell.open();
 * while (!shell.isDisposed()) {
 * if (!display.readAndDispatch())
 * display.sleep();
 * }
 * display.dispose();
 * ...
 * </code>
 * Statements after the loop need different handling because JavaScript
 * has no blocking event loop.
 */
private boolean metSWTBlockWhile = false;

/**
 * Marks whether the statement currently being translated comes before or
 * after a Dialog#open call:
 * <code>
 * ...
 * dialog.open();
 * ...
 * </code>
 */
private boolean metDialogOpen = false;
/* (non-Javadoc)
 * @see net.sf.j2s.core.astvisitors.ASTKeywordParser#skipDeclarePackages()
 */
protected String[] skipDeclarePackages() {
    // Skip declarations for everything the parent visitor skips, plus
    // the whole org.eclipse.swt package family.
    String swt = "org.eclipse.swt";
    String[] swtPackages = new String[] {
            swt,
            swt + ".accessibility",
            swt + ".browser",
            swt + ".custom",
            swt + ".dnd",
            swt + ".events",
            swt + ".graphics",
            swt + ".internal",
            swt + ".internal.dnd",
            swt + ".internal.browser",
            swt + ".internal.struct",
            swt + ".layout",
            swt + ".widgets"
    };
    String[] inherited = super.skipDeclarePackages();
    // Inherited packages first, SWT packages appended after them.
    String[] all = new String[inherited.length + swtPackages.length];
    int idx = 0;
    for (int i = 0; i < inherited.length; i++) {
        all[idx++] = inherited[i];
    }
    for (int i = 0; i < swtPackages.length; i++) {
        all[idx++] = swtPackages[i];
    }
    return all;
}
/**
 * Translates a simple name: inlines compile-time constants, and maps
 * names from the simulated browser/HTML packages onto their compressed
 * JavaScript aliases; everything else falls through to the default
 * translation.
 */
public boolean visit(SimpleName node) {
    // Inline compile-time constants directly.
    String constValue = checkConstantValue(node);
    if (constValue != null) {
        buffer.append(constValue);
        return false;
    }
    IBinding binding = node.resolveBinding();
    // instanceof already implies non-null; the original's extra
    // "binding != null" and "typeBinding != null" checks were redundant.
    if (binding instanceof ITypeBinding) {
        ITypeBinding typeBinding = (ITypeBinding) binding;
        String name = typeBinding.getQualifiedName();
        if (name.startsWith("org.eclipse.swt.internal.xhtml.")
                || name.startsWith("net.sf.j2s.html.")) {
            // Native browser types: the window/document singletons get
            // their compressed aliases, other identifiers pass through.
            String identifier = node.getIdentifier();
            if ("window".equals(identifier)) {
                identifier = "w$";
            } else if ("document".equals(identifier)) {
                identifier = "d$";
            }
            buffer.append(identifier);
            return false;
        }
        if ("org.eclipse.swt.internal.browser.OS".equals(name)) {
            buffer.append("O$");
            return false;
        }
    }
    return super.visit(node);
}
/**
 * Translates a qualified name: inlines constants where possible, drops
 * the package prefix for the simulated HTML packages, and otherwise
 * emits "qualifier.name" recursively.
 */
public boolean visit(QualifiedName node) {
    // Plain constant references can be inlined directly.
    if (isSimpleQualified(node)) {
        String constValue = checkConstantValue(node);
        if (constValue != null) {
            buffer.append(constValue);
            return false;
        }
    }
    // Names qualified by the simulated HTML packages are emitted
    // without their package prefix.
    String qualifier = node.getQualifier().toString();
    boolean isHtmlPackage = qualifier.equals("net.sf.j2s.html")
            || qualifier.equals("org.eclipse.swt.internal.xhtml");
    if (isHtmlPackage) {
        node.getName().accept(this);
        return false;
    }
    node.getQualifier().accept(this);
    buffer.append('.');
    node.getName().accept(this);
    return false;
}
public boolean visit(ClassInstanceCreation node) {
ITypeBinding binding = node.resolveTypeBinding();
if (binding != null) {
if (isTypeOf(binding, "org.eclipse.swt.internal.RunnableCompatibility")) {
buffer.append("Clazz.makeFunction (");
boolean result = super.visit(node);
buffer.append(")");
return result;
}
}
AnonymousClassDeclaration anonDeclare = node.getAnonymousClassDeclaration();
if (anonDeclare != null) {
} else {
String fqName = null;
String name = getTypeStringName(node.getType());
if (name != null) {
fqName = name;//.getFullyQualifiedName();
} else {
fqName = "noname";
}
fqName = shortenQualifiedName(fqName);
String filterKey = "org.eclipse.swt.internal.xhtml.";
if (fqName.startsWith(filterKey)) {
buffer.append(" new ");
buffer.append(fqName.substring(filterKey.length()));
buffer.append(" (");
visitList(node.arguments(), ", ");
buffer.append(")");
return false;
}
filterKey = "net.sf.j2s.html.";
if (fqName.startsWith(filterKey)) {
buffer.append(" new ");
buffer.append(fqName.substring(filterKey.length()));
buffer.append(" (");
visitList(node.arguments(), ", ");
buffer.append(")");
return false;
}
filterKey = "$wt.internal.xhtml.";
if (fqName.startsWith(filterKey)) {
buffer.append(" new ");
buffer.append(fqName.substring(filterKey.length()));
buffer.append(" (");
visitList(node.arguments(), ", ");
buffer.append(")");
return false;
}
}
return super.visit(node);
}
boolean isTypeOf(ITypeBinding binding, String clazzName) {
if (binding == null || clazzName == null || clazzName.length() == 0) {
return false;
}
if (clazzName.equals(binding.getBinaryName())) {
return true;
} else {
return isTypeOf(binding.getSuperclass(), clazzName);
}
}
	/**
	 * Detects the SWT modal event-loop idiom
	 * {@code while (!shell.isDisposed()) { ... }} and rewrites it into an
	 * asynchronous {@code Sync2Async.block(shell, this, function () {...}}
	 * callback, since JavaScript cannot block the UI thread. The matching
	 * "});" terminator is emitted later by {@code visit(Block)} once it sees
	 * {@code metSWTBlockWhile} set.
	 */
	public boolean visit(WhileStatement node) {
		Expression exp = node.getExpression();
		if (exp instanceof PrefixExpression) {
			PrefixExpression preExp = (PrefixExpression) exp;
			// Only the negated form "!x" is a candidate for the idiom.
			if ("!".equals(preExp.getOperator().toString())) {
				Expression operand = preExp.getOperand();
				if (operand instanceof MethodInvocation) {
					MethodInvocation shellIsDisposed = (MethodInvocation) operand;
					Expression shellExp = shellIsDisposed.getExpression();
					if (shellExp != null) {
						ITypeBinding typeBinding = shellExp.resolveTypeBinding();
						// Receiver must be (a subtype of) Shell and the call
						// must be exactly isDisposed().
						if (isTypeOf(typeBinding, "org.eclipse.swt.widgets.Shell")) {
							SimpleName methodName = shellIsDisposed.getName();
							if ("isDisposed".equals(methodName.getIdentifier())) {
								metSWTBlockWhile = true;
								buffer.append("Sync2Async.block (");
								shellExp.accept(this);
								buffer.append(", this, function () {\r\n");
								return false;
							}
						}
					}
				}
			}
		}
		return super.visit(node);
	}
	// No SWT-specific work on block exit; kept as an explicit extension point
	// that simply delegates to the superclass.
	public void endVisit(Block node) {
		super.endVisit(node);
	}
	/**
	 * Returns the methods to drop entirely from the generated JavaScript,
	 * as a flat array of (declaring class, method name) pairs: even indices
	 * are fully qualified class names, odd indices the method names. A "*"
	 * method name filters every method declared by that class. These are
	 * SWT/JFace defensive checks and assertions that are useless in JS.
	 */
	protected String[] getFilterMethods() {
		return new String[] {
			"org.eclipse.swt.widgets.Widget", "checkSubclass",
			"org.eclipse.swt.widgets.Dialog", "checkSubclass",
			"org.eclipse.swt.widgets.Widget", "checkWidget",
			"org.eclipse.swt.widgets.Display", "checkDevice",
			"org.eclipse.swt.graphics.Device", "checkDevice",
			"org.eclipse.jface.util.Assert", "*",
			"org.eclipse.core.internal.commands.util.Assert", "*",
			"org.eclipse.core.internal.runtime.Assert", "*"
		};
	}
/* (non-Javadoc)
* @see net.sf.j2s.core.astvisitors.ASTScriptVisitor#visit(org.eclipse.jdt.core.dom.MethodInvocation)
*/
public boolean visit(MethodInvocation node) {
IMethodBinding methodBinding = node.resolveMethodBinding();
if (methodBinding != null && "open".equals(methodBinding.getName()) && methodBinding.getParameterTypes().length == 0) {
boolean isDialogBlock = false;
boolean isWindowBlock = false;
if ((isDialogBlock = Bindings.findTypeInHierarchy(methodBinding.getDeclaringClass(), "org.eclipse.swt.widgets.Dialog") != null)
|| (!getPackageName().startsWith("org.eclipse.jface.")
&& (isWindowBlock = Bindings.findTypeInHierarchy(methodBinding.getDeclaringClass(), "org.eclipse.jface.window.Window") != null))) {
int lastIndexOf1 = buffer.lastIndexOf(";\r\n");
if (lastIndexOf1 != -1) {
lastIndexOf1 += 3;
}
int lastIndexOf2 = buffer.lastIndexOf("}\r\n");
if (lastIndexOf2 != -1) {
lastIndexOf2 += 3;
}
int lastIndexOf3 = buffer.lastIndexOf("}");
if (lastIndexOf3 != -1) {
lastIndexOf3 += 1;
}
int lastIndexOf4 = buffer.lastIndexOf("{\r\n");
if (lastIndexOf4 != -1) {
lastIndexOf4 += 3;
}
int lastIndexOf5 = buffer.lastIndexOf("{");
if (lastIndexOf5 != -1) {
lastIndexOf5 += 1;
}
int lastIndexOf = -1;
if (lastIndexOf1 == -1 && lastIndexOf2 == -1
&& lastIndexOf3 == -1 && lastIndexOf1 == -1
&& lastIndexOf2 == -1 && lastIndexOf3 == -1) {
lastIndexOf = buffer.length(); // should never be in here!
} else {
lastIndexOf = Math.max(Math.max(Math.max(lastIndexOf1, lastIndexOf2), lastIndexOf3),
Math.max(lastIndexOf4, lastIndexOf5));
}
String s = buffer.substring(lastIndexOf);
buffer.delete(lastIndexOf, buffer.length());
if (isDialogBlock) {
buffer.append("DialogSync2Async.block (");
} else if (isWindowBlock) {
buffer.append("net.sf.j2s.ajax.AWindowDelegate.asyncOpen (");
}
node.getExpression().accept(this);
buffer.append(", this, function () {\r\n");
buffer.append(s);
node.getExpression().accept(this);
if (isDialogBlock) {
buffer.append(".dialogReturn");
} else if (isWindowBlock) {
buffer.append(".getReturnCode ()");
}
metDialogOpen = true;
return false;
}
}
if (methodBinding != null && "net.sf.j2s.ajax.junit.AsyncSWT".equals(methodBinding.getDeclaringClass().getQualifiedName())
&& "waitLayout".equals(methodBinding.getName())) {
metSWTBlockWhile = true;
node.getExpression().accept(this);
buffer.append(".waitLayout (");
Expression shellExp = (Expression) node.arguments().get(0);
shellExp.accept(this);
buffer.append(", ");
Expression runnableExp = (Expression) node.arguments().get(1);
runnableExp.accept(this);
buffer.append(", this, function () {\r\n
return false;
}
String[] filterMethods = getFilterMethods();
for (int i = 0; i < filterMethods.length; i += 2) {
if ("*".equals(filterMethods[i + 1])) {
if (methodBinding == null) {
continue;
}
ITypeBinding type = methodBinding.getDeclaringClass();
if (type != null && filterMethods[i].equals(type.getQualifiedName())) {
return false;
}
} else if (Bindings.isMethodInvoking(methodBinding, filterMethods[i], filterMethods[i + 1])) {
return false;
}
}
return super.visit(node);
}
/* (non-Javadoc)
* @see net.sf.j2s.core.astvisitors.ASTScriptVisitor#endVisit(org.eclipse.jdt.core.dom.MethodDeclaration)
*/
public void endVisit(MethodDeclaration node) {
IMethodBinding methodBinding = node.resolveBinding();
String[] filterMethods = getFilterMethods();
for (int i = 0; i < filterMethods.length; i += 2) {
if ("*".equals(filterMethods[i + 1])) {
if (methodBinding == null) {
continue;
}
ITypeBinding type = methodBinding.getDeclaringClass();
if (type != null && filterMethods[i].equals(type.getQualifiedName())) {
return;
}
} else if (Bindings.isMethodInvoking(methodBinding, filterMethods[i], filterMethods[i + 1])) {
return;
}
}
super.endVisit(node);
}
/* (non-Javadoc)
* @see net.sf.j2s.core.astvisitors.ASTScriptVisitor#visit(org.eclipse.jdt.core.dom.MethodDeclaration)
*/
public boolean visit(MethodDeclaration node) {
IMethodBinding methodBinding = node.resolveBinding();
String[] filterMethods = getFilterMethods();
for (int i = 0; i < filterMethods.length; i += 2) {
if ("*".equals(filterMethods[i + 1])) {
if (methodBinding == null) {
continue;
}
ITypeBinding type = methodBinding.getDeclaringClass();
if (type != null && filterMethods[i].equals(type.getQualifiedName())) {
return false;
}
} else if (Bindings.isMethodInvoking(methodBinding, filterMethods[i], filterMethods[i + 1])) {
return false;
}
}
return super.visit(node);
}
	/**
	 * Visits a statement block, counting how many asynchronous callbacks were
	 * opened by nested dialog-open / SWT-while rewrites so that the matching
	 * "});" terminators can be appended when the block ends. The
	 * metSWTBlockWhile / metDialogOpen flags are saved on entry and restored
	 * on every exit path so they only ever describe the current block.
	 */
	public boolean visit(Block node) {
		int swtBlockWhileCount = 0;
		int swtDialogOpenCount = 0;
		boolean lastSWTBlockWhile = metSWTBlockWhile;
		metSWTBlockWhile = false;
		boolean lastDialogOpen = metDialogOpen;
		metDialogOpen = false;
		if (super.visit(node) == false) {
			// Superclass refused the block: restore flags and bail out.
			metSWTBlockWhile = lastSWTBlockWhile;
			metDialogOpen = lastDialogOpen;
			return false;
		}
		List statements = node.statements();
		for (Iterator iter = statements.iterator(); iter.hasNext();) {
			Statement stmt = (Statement) iter.next();
			if (stmt instanceof ExpressionStatement) {
				ExpressionStatement expStmt = (ExpressionStatement) stmt;
				Expression exp = expStmt.getExpression();
				String[] filterMethods = getFilterMethods();
				boolean isContinue = false;
				for (int i = 0; i < filterMethods.length; i += 2) {
					if ("*".equals(filterMethods[i + 1])) {
						// Wildcard entries are handled at declaration level.
						continue;
					} else if (Bindings.isMethodInvoking(exp, filterMethods[i], filterMethods[i + 1])) {
						isContinue = true;
						break;
					}
				}
				if (isContinue) {
					// Filtered call (e.g. checkWidget): emit nothing for it.
					continue;
				}
			}
			stmt.accept(this);
			// A nested rewrite set a flag: remember it, reset for next stmt.
			if (metSWTBlockWhile) {
				swtBlockWhileCount++;
				metSWTBlockWhile = false;
			}
			if (metDialogOpen) {
				swtDialogOpenCount++;
				metDialogOpen = false;
			}
		}
		// Close every callback opened inside this block.
		for (int i = 0; i < swtBlockWhileCount + swtDialogOpenCount; i++) {
			buffer.append("});\r\n");
			buffer.append("return;\r\n"); /* always return directly when dialog#open is called */
		}
		metSWTBlockWhile = lastSWTBlockWhile;
		metDialogOpen = lastDialogOpen;
		return false;
	}
public boolean visit(IfStatement node) {
if (node.getElseStatement() == null) {
Statement thenStatement = node.getThenStatement();
if (thenStatement instanceof Block) {
Block block = (Block) thenStatement;
List statements = block.statements();
if (statements.size() == 1) {
thenStatement = (Statement) statements.get(0);
}
}
if (thenStatement instanceof ExpressionStatement) {
ExpressionStatement expStmt = (ExpressionStatement) thenStatement;
Expression exp = expStmt.getExpression();
if (Bindings.isMethodInvoking(exp, "org.eclipse.swt.widgets.Widget", "error")) {
return false;
}
if (Bindings.isMethodInvoking(exp, "org.eclipse.swt.SWT", "error")) {
return false;
}
if (Bindings.isMethodInvoking(exp, "org.eclipse.swt.widgets.Display", "error")) {
return false;
}
}
}
return super.visit(node);
}
}
|
package cuchaz.enigma.bytecode;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javassist.CtClass;
import javassist.bytecode.AttributeInfo;
import javassist.bytecode.BadBytecode;
import javassist.bytecode.ByteArray;
import javassist.bytecode.ClassFile;
import javassist.bytecode.CodeAttribute;
import javassist.bytecode.ConstPool;
import javassist.bytecode.Descriptor;
import javassist.bytecode.FieldInfo;
import javassist.bytecode.InnerClassesAttribute;
import javassist.bytecode.LocalVariableTypeAttribute;
import javassist.bytecode.MethodInfo;
import javassist.bytecode.SignatureAttribute;
import javassist.bytecode.SignatureAttribute.ArrayType;
import javassist.bytecode.SignatureAttribute.BaseType;
import javassist.bytecode.SignatureAttribute.ClassSignature;
import javassist.bytecode.SignatureAttribute.ClassType;
import javassist.bytecode.SignatureAttribute.MethodSignature;
import javassist.bytecode.SignatureAttribute.NestedClassType;
import javassist.bytecode.SignatureAttribute.ObjectType;
import javassist.bytecode.SignatureAttribute.Type;
import javassist.bytecode.SignatureAttribute.TypeArgument;
import javassist.bytecode.SignatureAttribute.TypeParameter;
import javassist.bytecode.SignatureAttribute.TypeVariable;
import cuchaz.enigma.mapping.ClassEntry;
import cuchaz.enigma.mapping.ClassNameReplacer;
import cuchaz.enigma.mapping.Translator;
public class ClassRenamer {
private static enum SignatureType {
Class {
@Override
public String rename(String signature, ReplacerClassMap map) {
return renameClassSignature(signature, map);
}
},
Field {
@Override
public String rename(String signature, ReplacerClassMap map) {
return renameFieldSignature(signature, map);
}
},
Method {
@Override
public String rename(String signature, ReplacerClassMap map) {
return renameMethodSignature(signature, map);
}
};
public abstract String rename(String signature, ReplacerClassMap map);
}
private static class ReplacerClassMap extends HashMap<String,String> {
private static final long serialVersionUID = 317915213205066168L;
private ClassNameReplacer m_replacer;
public ReplacerClassMap(ClassNameReplacer replacer) {
m_replacer = replacer;
}
@Override
public String get(Object obj) {
if (obj instanceof String) {
return get((String)obj);
}
return null;
}
public String get(String className) {
return m_replacer.replace(className);
}
}
public static void renameClasses(CtClass c, final Translator translator) {
renameClasses(c, new ClassNameReplacer() {
@Override
public String replace(String className) {
ClassEntry entry = translator.translateEntry(new ClassEntry(className));
if (entry != null) {
return entry.getName();
}
return null;
}
});
}
public static void moveAllClassesOutOfDefaultPackage(CtClass c, final String newPackageName) {
renameClasses(c, new ClassNameReplacer() {
@Override
public String replace(String className) {
ClassEntry entry = new ClassEntry(className);
if (entry.isInDefaultPackage()) {
return newPackageName + "/" + entry.getName();
}
return null;
}
});
}
public static void moveAllClassesIntoDefaultPackage(CtClass c, final String oldPackageName) {
renameClasses(c, new ClassNameReplacer() {
@Override
public String replace(String className) {
ClassEntry entry = new ClassEntry(className);
if (entry.getPackageName().equals(oldPackageName)) {
return entry.getSimpleName();
}
return null;
}
});
}
@SuppressWarnings("unchecked")
public static void renameClasses(CtClass c, ClassNameReplacer replacer) {
// sadly, we can't use CtClass.renameClass() because SignatureAttribute.renameClass() is extremely buggy =(
ReplacerClassMap map = new ReplacerClassMap(replacer);
ClassFile classFile = c.getClassFile();
// rename the constant pool (covers ClassInfo, MethodTypeInfo, and NameAndTypeInfo)
ConstPool constPool = c.getClassFile().getConstPool();
constPool.renameClass(map);
// rename class attributes
renameAttributes(classFile.getAttributes(), map, SignatureType.Class);
// rename methods
for (MethodInfo methodInfo : (List<MethodInfo>)classFile.getMethods()) {
methodInfo.setDescriptor(Descriptor.rename(methodInfo.getDescriptor(), map));
renameAttributes(methodInfo.getAttributes(), map, SignatureType.Method);
}
// rename fields
for (FieldInfo fieldInfo : (List<FieldInfo>)classFile.getFields()) {
fieldInfo.setDescriptor(Descriptor.rename(fieldInfo.getDescriptor(), map));
renameAttributes(fieldInfo.getAttributes(), map, SignatureType.Field);
}
// rename the class name itself last
// NOTE: don't use the map here, because setName() calls the buggy SignatureAttribute.renameClass()
// we only want to replace exactly this class name
String newName = renameClassName(c.getName(), map);
if (newName != null) {
c.setName(newName);
}
// replace simple names in the InnerClasses attribute too
InnerClassesAttribute attr = (InnerClassesAttribute)c.getClassFile().getAttribute(InnerClassesAttribute.tag);
if (attr != null) {
for (int i = 0; i < attr.tableLength(); i++) {
// get the inner class full name (which has already been translated)
ClassEntry classEntry = new ClassEntry(Descriptor.toJvmName(attr.innerClass(i)));
if (attr.innerNameIndex(i) != 0) {
// update the inner name
attr.setInnerNameIndex(i, constPool.addUtf8Info(classEntry.getInnermostClassName()));
}
/* DEBUG
System.out.println(String.format("\tDEOBF: %s-> ATTR: %s,%s,%s", classEntry, attr.outerClass(i), attr.innerClass(i), attr.innerName(i)));
*/
}
}
}
@SuppressWarnings("unchecked")
private static void renameAttributes(List<AttributeInfo> attributes, ReplacerClassMap map, SignatureType type) {
try {
// make the rename class method accessible
Method renameClassMethod = AttributeInfo.class.getDeclaredMethod("renameClass", Map.class);
renameClassMethod.setAccessible(true);
for (AttributeInfo attribute : attributes) {
if (attribute instanceof SignatureAttribute) {
// this has to be handled specially because SignatureAttribute.renameClass() is buggy as hell
SignatureAttribute signatureAttribute = (SignatureAttribute)attribute;
String newSignature = type.rename(signatureAttribute.getSignature(), map);
if (newSignature != null) {
signatureAttribute.setSignature(newSignature);
}
} else if (attribute instanceof CodeAttribute) {
// code attributes have signature attributes too (indirectly)
CodeAttribute codeAttribute = (CodeAttribute)attribute;
renameAttributes(codeAttribute.getAttributes(), map, type);
} else if (attribute instanceof LocalVariableTypeAttribute) {
// lvt attributes have signature attributes too
LocalVariableTypeAttribute localVariableAttribute = (LocalVariableTypeAttribute)attribute;
renameLocalVariableTypeAttribute(localVariableAttribute, map);
} else {
renameClassMethod.invoke(attribute, map);
}
}
} catch(NoSuchMethodException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
throw new Error("Unable to call javassist methods by reflection!", ex);
}
}
private static void renameLocalVariableTypeAttribute(LocalVariableTypeAttribute attribute, ReplacerClassMap map) {
// adapted from LocalVariableAttribute.renameClass()
ConstPool cp = attribute.getConstPool();
int n = attribute.tableLength();
byte[] info = attribute.get();
for (int i = 0; i < n; ++i) {
int pos = i * 10 + 2;
int index = ByteArray.readU16bit(info, pos + 6);
if (index != 0) {
String signature = cp.getUtf8Info(index);
String newSignature = renameLocalVariableSignature(signature, map);
if (newSignature != null) {
ByteArray.write16bit(cp.addUtf8Info(newSignature), info, pos + 6);
}
}
}
}
private static String renameLocalVariableSignature(String signature, ReplacerClassMap map) {
// for some reason, signatures with . in them don't count as field signatures
// looks like anonymous classes delimit with . in stead of $
// convert the . to $, but keep track of how many we replace
// we need to put them back after we translate
int start = signature.lastIndexOf('$') + 1;
int numConverted = 0;
StringBuilder buf = new StringBuilder(signature);
for (int i=buf.length()-1; i>=start; i
char c = buf.charAt(i);
if (c == '.') {
buf.setCharAt(i, '$');
numConverted++;
}
}
signature = buf.toString();
// translate
String newSignature = renameFieldSignature(signature, map);
if (newSignature != null) {
// put the delimiters back
buf = new StringBuilder(newSignature);
for (int i=buf.length()-1; i>=0 && numConverted > 0; i
char c = buf.charAt(i);
if (c == '$') {
buf.setCharAt(i, '.');
numConverted
}
}
assert(numConverted == 0);
newSignature = buf.toString();
return newSignature;
}
return null;
}
private static String renameClassSignature(String signature, ReplacerClassMap map) {
try {
ClassSignature type = renameType(SignatureAttribute.toClassSignature(signature), map);
if (type != null) {
return type.encode();
}
return null;
} catch (BadBytecode ex) {
throw new Error("Can't parse field signature: " + signature);
}
}
private static String renameFieldSignature(String signature, ReplacerClassMap map) {
try {
ObjectType type = renameType(SignatureAttribute.toFieldSignature(signature), map);
if (type != null) {
return type.encode();
}
return null;
} catch (BadBytecode ex) {
throw new Error("Can't parse class signature: " + signature);
}
}
private static String renameMethodSignature(String signature, ReplacerClassMap map) {
try {
MethodSignature type = renameType(SignatureAttribute.toMethodSignature(signature), map);
if (type != null) {
return type.encode();
}
return null;
} catch (BadBytecode ex) {
throw new Error("Can't parse method signature: " + signature);
}
}
private static ClassSignature renameType(ClassSignature type, ReplacerClassMap map) {
TypeParameter[] typeParamTypes = type.getParameters();
if (typeParamTypes != null) {
typeParamTypes = Arrays.copyOf(typeParamTypes, typeParamTypes.length);
for (int i=0; i<typeParamTypes.length; i++) {
TypeParameter newParamType = renameType(typeParamTypes[i], map);
if (newParamType != null) {
typeParamTypes[i] = newParamType;
}
}
}
ClassType superclassType = type.getSuperClass();
if (superclassType != ClassType.OBJECT) {
ClassType newSuperclassType = renameType(superclassType, map);
if (newSuperclassType != null) {
superclassType = newSuperclassType;
}
}
ClassType[] interfaceTypes = type.getInterfaces();
if (interfaceTypes != null) {
interfaceTypes = Arrays.copyOf(interfaceTypes, interfaceTypes.length);
for (int i=0; i<interfaceTypes.length; i++) {
ClassType newInterfaceType = renameType(interfaceTypes[i], map);
if (newInterfaceType != null) {
interfaceTypes[i] = newInterfaceType;
}
}
}
return new ClassSignature(typeParamTypes, superclassType, interfaceTypes);
}
private static MethodSignature renameType(MethodSignature type, ReplacerClassMap map) {
TypeParameter[] typeParamTypes = type.getTypeParameters();
if (typeParamTypes != null) {
typeParamTypes = Arrays.copyOf(typeParamTypes, typeParamTypes.length);
for (int i=0; i<typeParamTypes.length; i++) {
TypeParameter newParamType = renameType(typeParamTypes[i], map);
if (newParamType != null) {
typeParamTypes[i] = newParamType;
}
}
}
Type[] paramTypes = type.getParameterTypes();
if (paramTypes != null) {
paramTypes = Arrays.copyOf(paramTypes, paramTypes.length);
for (int i=0; i<paramTypes.length; i++) {
Type newParamType = renameType(paramTypes[i], map);
if (newParamType != null) {
paramTypes[i] = newParamType;
}
}
}
Type returnType = type.getReturnType();
if (returnType != null) {
Type newReturnType = renameType(returnType, map);
if (newReturnType != null) {
returnType = newReturnType;
}
}
ObjectType[] exceptionTypes = type.getExceptionTypes();
if (exceptionTypes != null) {
exceptionTypes = Arrays.copyOf(exceptionTypes, exceptionTypes.length);
for (int i=0; i<exceptionTypes.length; i++) {
ObjectType newExceptionType = renameType(exceptionTypes[i], map);
if (newExceptionType != null) {
exceptionTypes[i] = newExceptionType;
}
}
}
return new MethodSignature(typeParamTypes, paramTypes, returnType, exceptionTypes);
}
private static Type renameType(Type type, ReplacerClassMap map) {
if (type instanceof ObjectType) {
return renameType((ObjectType)type, map);
} else if (type instanceof BaseType) {
return renameType((BaseType)type, map);
} else {
throw new Error("Don't know how to rename type " + type.getClass());
}
}
private static ObjectType renameType(ObjectType type, ReplacerClassMap map) {
if (type instanceof ArrayType) {
return renameType((ArrayType)type, map);
} else if (type instanceof ClassType) {
return renameType((ClassType)type, map);
} else if (type instanceof TypeVariable) {
return renameType((TypeVariable)type, map);
} else {
throw new Error("Don't know how to rename type " + type.getClass());
}
}
private static BaseType renameType(BaseType type, ReplacerClassMap map) {
// don't have to rename primitives
return null;
}
private static TypeVariable renameType(TypeVariable type, ReplacerClassMap map) {
// don't have to rename template args
return null;
}
private static ClassType renameType(ClassType type, ReplacerClassMap map) {
// translate type args
TypeArgument[] args = type.getTypeArguments();
if (args != null) {
args = Arrays.copyOf(args, args.length);
for (int i=0; i<args.length; i++) {
TypeArgument newType = renameType(args[i], map);
if (newType != null) {
args[i] = newType;
}
}
}
if (type instanceof NestedClassType) {
NestedClassType nestedType = (NestedClassType)type;
// translate the name
String name = getClassName(type);
String newName = map.get(name);
if (newName != null) {
name = new ClassEntry(newName).getInnermostClassName();
}
// translate the parent class too
ClassType parent = renameType(nestedType.getDeclaringClass(), map);
if (parent == null) {
parent = nestedType.getDeclaringClass();
}
return new NestedClassType(parent, name, args);
} else {
// translate the name
String name = type.getName();
String newName = renameClassName(name, map);
if (newName != null) {
name = newName;
}
return new ClassType(name, args);
}
}
private static String getClassName(ClassType type) {
if (type instanceof NestedClassType) {
NestedClassType nestedType = (NestedClassType)type;
return getClassName(nestedType.getDeclaringClass()) + "$" + Descriptor.toJvmName(type.getName().replace('.', '$'));
} else {
return Descriptor.toJvmName(type.getName());
}
}
private static String renameClassName(String name, ReplacerClassMap map) {
String newName = map.get(Descriptor.toJvmName(name));
if (newName != null) {
return Descriptor.toJavaName(newName);
}
return null;
}
private static TypeArgument renameType(TypeArgument type, ReplacerClassMap map) {
ObjectType subType = type.getType();
if (subType != null) {
ObjectType newSubType = renameType(subType, map);
if (newSubType != null) {
switch (type.getKind()) {
case ' ': return new TypeArgument(newSubType);
case '+': return TypeArgument.subclassOf(newSubType);
case '-': return TypeArgument.superOf(newSubType);
default:
throw new Error("Unknown type kind: " + type.getKind());
}
}
}
return null;
}
private static ArrayType renameType(ArrayType type, ReplacerClassMap map) {
Type newSubType = renameType(type.getComponentType(), map);
if (newSubType != null) {
return new ArrayType(type.getDimension(), newSubType);
}
return null;
}
private static TypeParameter renameType(TypeParameter type, ReplacerClassMap map) {
ObjectType superclassType = type.getClassBound();
if (superclassType != null) {
ObjectType newSuperclassType = renameType(superclassType, map);
if (newSuperclassType != null) {
superclassType = newSuperclassType;
}
}
ObjectType[] interfaceTypes = type.getInterfaceBound();
if (interfaceTypes != null) {
interfaceTypes = Arrays.copyOf(interfaceTypes, interfaceTypes.length);
for (int i=0; i<interfaceTypes.length; i++) {
ObjectType newInterfaceType = renameType(interfaceTypes[i], map);
if (newInterfaceType != null) {
interfaceTypes[i] = newInterfaceType;
}
}
}
return new TypeParameter(type.getName(), superclassType, interfaceTypes);
}
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package Gomoku;
import java.awt.Color;
import java.awt.Graphics;
import javax.swing.JOptionPane;
/**
 * Gomoku board panel: draws the grid and both players' stones, and turns
 * mouse clicks into moves when it is the local player's turn.
 *
 * @author Administrator
 */
public class MyPanel extends javax.swing.JPanel {

    private int isGo;     // 1 when a new local move is ready to be picked up (e.g. sent to the opponent)
    private int isMyTurn; // 1 while the local player may place a stone
    private int isWin;    // win flag, read/written via the accessors below
    int startX = 40;      // x of the first grid line, in pixels
    int startY = 40;      // y of the first grid line, in pixels
    int lineNum = 15;     // number of grid lines per side
    int space = 40;       // distance between grid lines, in pixels
    int currentX;         // last mouse-click position
    int currentY;
    private int[][] arr = new int[lineNum][lineNum];         // local player's stones (1 = stone present)
    private int[][] opponentArr = new int[lineNum][lineNum]; // opponent's stones (1 = stone present)
    Color chessColor = Color.blue;
    Color opponentColor = Color.white;
    ChessController controller;

    /**
     * Creates new form MyPanel
     */
    public MyPanel() {
        isGo = 0;
        isMyTurn = 0;
        isWin = 0;
        for (int i = 0; i < lineNum; i++) {
            for (int j = 0; j < lineNum; j++) {
                arr[i][j] = 0;
                opponentArr[i][j] = 0;
            }
        }
        //arrClient[3][5]=1;
        // FIX: the original constructor also registered its own MouseAdapter
        // here, but initComponents() below registers an identical one, so
        // every click ran formMouseClicked() twice (placing a stone and then
        // flipping the turn state a second time). The listener is now
        // registered only once, by the generated initComponents().
        initComponents();
    }

    /** Paints the grid lines and then both players' stones. */
    @Override
    protected void paintComponent(Graphics g) {
        super.paintComponent(g);
        g.setColor(Color.black);
        // vertical grid lines
        for (int i = 0; i < lineNum; i++) {
            g.drawLine(startX + space * i, startY, startX + space * i, startY + space * (lineNum - 1));
        }
        // horizontal grid lines
        for (int i = 0; i < lineNum; i++) {
            g.drawLine(startX, startY + space * i, startX + space * (lineNum - 1), startY + space * i);
        }
        // stones: 30px circles centered on the grid intersections
        for (int i = 0; i < lineNum; i++) {
            for (int j = 0; j < lineNum; j++) {
                if (getArr()[i][j] == 1) {
                    g.setColor(chessColor);
                    g.fillOval(i * space + startX - 15, j * space + startY - 15, 30, 30);
                }
                if (getOpponentArr()[i][j] == 1) {
                    g.setColor(opponentColor);
                    g.fillOval(i * space + startX - 15, j * space + startY - 15, 30, 30);
                }
            }
        }
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                formMouseClicked(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
        this.setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 400, Short.MAX_VALUE)
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGap(0, 300, Short.MAX_VALUE)
        );
    }// </editor-fold>//GEN-END:initComponents

    /**
     * Handles a click while it is the local player's turn: snaps the click to
     * the nearest intersection (clicks falling in the middle between two lines
     * are ignored), places a stone on an empty point, checks for a win, then
     * hands the turn over and flags the move for sending.
     */
    private void formMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_formMouseClicked
        // TODO add your handling code here:
        if (getIsMyTurn() == 1) {
            currentX = evt.getX();
            currentY = evt.getY();
            int i = -1, j = -1;
            // Snap to the nearest line when within 15px of it; offsets of
            // 16..24px fall in a dead zone and leave the index at -1.
            if ((currentX - startX) % space <= 15) {
                i = (currentX - startX) / space;
            }
            if ((currentX - startX) % space >= 25) {
                i = (currentX - startX) / space + 1;
            }
            if ((currentY - startY) % space <= 15) {
                j = (currentY - startY) / space;
            }
            if ((currentY - startY) % space >= 25) {
                j = (currentY - startY) / space + 1;
            }
            if (i >= 0 && j >= 0 && getArr()[i][j] == 0 && getOpponentArr()[i][j] == 0) {
                getArr()[i][j] = 1;
                ChessJudge judge = new ChessJudge(getArr(), i, j);
                if (judge.iswin()) {
                    // NOTE(review): the dialog message was left empty in the
                    // original source — confirm the intended win message.
                    JOptionPane.showMessageDialog(null, "");
                }
                setIsMyTurn(0);
                setIsGo(1);
            }
        }
        this.repaint();
    }//GEN-LAST:event_formMouseClicked

    // Variables declaration - do not modify//GEN-BEGIN:variables
    // End of variables declaration//GEN-END:variables

    /**
     * @return the arr
     */
    public int[][] getArr() {
        return arr;
    }

    /**
     * @param arr the arr to set
     */
    public void setArr(int[][] arr) {
        this.arr = arr;
    }

    /**
     * @return the opponentArr
     */
    public int[][] getOpponentArr() {
        return opponentArr;
    }

    /**
     * @param opponentArr the opponentArr to set
     */
    public void setOpponentArr(int[][] opponentArr) {
        this.opponentArr = opponentArr;
    }

    /**
     * @return the isGo
     */
    public int getIsGo() {
        return isGo;
    }

    /**
     * @param isGo the isGo to set
     */
    public void setIsGo(int isGo) {
        this.isGo = isGo;
    }

    /**
     * @return the isMyTurn
     */
    public int getIsMyTurn() {
        return isMyTurn;
    }

    /**
     * @param isMyTurn the isMyTurn to set
     */
    public void setIsMyTurn(int isMyTurn) {
        this.isMyTurn = isMyTurn;
    }

    /**
     * @return the isWin
     */
    public int getIsWin() {
        return isWin;
    }

    /**
     * @param isWin the isWin to set
     */
    public void setIsWin(int isWin) {
        this.isWin = isWin;
    }
}
|
package edu.bellevue.android;
import java.util.List;
import android.app.ListActivity;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import android.widget.Toast;
import edu.bellevue.android.blackboard.BlackboardHelper;
import edu.bellevue.android.blackboard.Forum;
public class ThreadActivity extends ListActivity {
    // Handler message codes sent by the background worker.
    private static final int THREAD_COMPLETE = 1;   // thread list loaded successfully
    private static final int CONN_NOT_ALLOWED = 2;  // prefs forbid the current connection type
    private static final int CONN_NOT_POSSIBLE = 3; // no network available at all
    private List<edu.bellevue.android.blackboard.Thread> threads; // result filled in by the worker
    // Identifiers of the forum being viewed, taken from the launching intent.
    private String courseId;
    private String forumId;
    private String confId;
    private String friendlyName; // display name used in the activity title
    private SharedPreferences prefs;
    private Context ctx;
    private Handler handler; // marshals worker results back onto the UI thread
    private ProgressDialog pd; // "loading" dialog shown while the worker runs
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.listview);
        ctx = this.getApplicationContext();
        prefs = PreferenceManager.getDefaultSharedPreferences(ctx);
        handler = new threadHandler();
        // NOTE(review): assumes the launching intent always carries these
        // extras; getExtras() returns null otherwise — confirm all callers.
        Bundle extras = getIntent().getExtras();
        courseId = extras.getString("course_id");
        confId = extras.getString("conf_id");
        forumId = extras.getString("forum_id");
        friendlyName = extras.getString("name");
        setTitle(friendlyName + " - Threads");
        // Fetch the thread list off the UI thread; threadHandler dismisses
        // the dialog and populates the list when the worker reports back.
        pd = ProgressDialog.show(this, "Please Wait", "Loading Threads...");
        Thread t = new Thread(new getThreadsThread());
        t.start();
    }
public boolean onCreateOptionsMenu(Menu m)
{
m.add("New Thread");
m.add("Settings");
return super.onCreateOptionsMenu(m);
}
public boolean onOptionsItemSelected(MenuItem mi)
{
if (mi.getTitle().equals("Settings"))
{
Intent i = new Intent(this,PrefActivity.class);
startActivity(i);
}
if (mi.getTitle().equals("New Thread"))
{
Toast.makeText(this, "Not Implemented Yet!", Toast.LENGTH_LONG).show();
}
return true;
}
protected class threadHandler extends Handler
{
public void handleMessage(Message m)
{
pd.dismiss();
switch(m.what)
{
case THREAD_COMPLETE:
setListAdapter(new ThreadAdapter(ctx, android.R.layout.simple_list_item_1,threads));
break;
case CONN_NOT_ALLOWED:
ConnChecker.showUnableToConnect(ThreadActivity.this);
finish();
break;
case CONN_NOT_POSSIBLE:
Toast.makeText(ThreadActivity.this, "No Active Network Found", Toast.LENGTH_SHORT).show();
finish();
}
}
}
protected class getThreadsThread implements Runnable
{
public void run() {
if (ConnChecker.shouldConnect(prefs, ctx))
{
threads = BlackboardHelper.getThreads(forumId,confId,courseId);
handler.sendEmptyMessage(THREAD_COMPLETE);
}else
{
if (ConnChecker.getConnType(ctx).equals("NoNetwork"))
{
handler.sendEmptyMessage(CONN_NOT_POSSIBLE);
}else
{
handler.sendEmptyMessage(CONN_NOT_ALLOWED);
}
}
}
}
private class ThreadAdapter extends ArrayAdapter<edu.bellevue.android.blackboard.Thread> {
private List<edu.bellevue.android.blackboard.Thread> items;
public ThreadAdapter(Context context, int textViewResourceId, List<edu.bellevue.android.blackboard.Thread> items) {
super(context, textViewResourceId, items);
this.items = items;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View v = convertView;
if (v == null) {
LayoutInflater vi = (LayoutInflater)getSystemService(Context.LAYOUT_INFLATER_SERVICE);
v = vi.inflate(R.layout.threadrow, null);
}
edu.bellevue.android.blackboard.Thread o = items.get(position);
if (o != null) {
TextView tt = (TextView) v.findViewById(R.id.toptext);
TextView mt = (TextView) v.findViewById(R.id.middletext);
TextView bt = (TextView) v.findViewById(R.id.bottomtext);
if (tt != null) {
tt.setText(o.threadName);
}
if (mt != null) {
mt.setText("By: " + o.threadAuthor + " On: "+o.threadDate);
}
if(bt != null){
bt.setText("Total Posts: "+ o.pCount + " Unread: " + o.uCount);
}
}
return v;
}
}
}
|
package edu.dynamic.dynamiz.storage;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.Stack;
import java.util.TreeMap;
import edu.dynamic.dynamiz.controller.DataFileReadWrite;
import edu.dynamic.dynamiz.parser.OptionType;
import edu.dynamic.dynamiz.structure.MyDate;
import edu.dynamic.dynamiz.structure.EndDateComparator;
import edu.dynamic.dynamiz.structure.EventItem;
import edu.dynamic.dynamiz.structure.PriorityComparator;
import edu.dynamic.dynamiz.structure.StartDateComparator;
import edu.dynamic.dynamiz.structure.TaskItem;
import edu.dynamic.dynamiz.structure.ToDoItem;
/**
* Defines the storage class holding the list of tasks and events.
*
* Public Methods(currently that can be used)
* static Storage getInstance() //gets the Storage instance
* ToDoItem addItem(ToDoItem item) //Adds the given item to the list.
* ToDoItem[] getList(OptionsType[] optionsList) //Gets the list of ToDoItem objects held by this storage.
* ToDoItem removeItem(String id) //Removes the item with the specified id from this storage.
* ToDoItem[] searchItems(String keyword, int priority, MyDate start, MyDate end, OptionType[] optList) //Gets a list of items with the given parameter values.
* ToDoItem[] updateItem(String id, String description, int priority, Date start, Date end) //Updates the ToDoItem with the given id with the specified details.
*
* @author zixian
*/
public class Storage {
//Main data members
private ArrayList<ToDoItem> mainList; //The main list
private ArrayList<ToDoItem> toDoItemList; //Holds items without any dates
private ArrayList<EventItem> eventList; //Holds events
private ArrayList<TaskItem> taskList; //Holds deadline tasks
private TreeMap<String, ToDoItem> searchTree; //Maps each item to its ID for faster search by ID
private Stack<ToDoItem> completedList; //The list of completed items
private static Storage storage; //Holds the only instance of the Storage object
/**
* Creates a new instance of Storage.
*/
private Storage(){
mainList = DataFileReadWrite.getListFromFile();
searchTree = new TreeMap<String, ToDoItem>();
toDoItemList = new ArrayList<ToDoItem>();
eventList = new ArrayList<EventItem>();
taskList = new ArrayList<TaskItem>();
completedList = new Stack<ToDoItem>();
Iterator<ToDoItem> itr = mainList.iterator();
ToDoItem temp;
while(itr.hasNext()){
temp = itr.next();
searchTree.put(temp.getId(), temp);
if(temp instanceof TaskItem){
taskList.add((TaskItem)temp);
} else if(temp instanceof EventItem){
eventList.add((EventItem)temp);
} else{
toDoItemList.add(temp);
}
}
}
/**
* Gets the Storage instance.
* @return The only Storage instance of this class.
*/
public static Storage getInstance(){
if(storage==null)
storage = new Storage();
return storage;
}
/**
* Adds the given item to the list. For use by CommandDelete's undo method.
* @param item The ToDoItem to be added to the list.
* @return The TodoItem that is added to the list.
*/
public ToDoItem addItem(ToDoItem item){
//item must not be null.
assert item!=null;
mainList.add(item);
searchTree.put(item.getId(), item);
if(item instanceof TaskItem){
taskList.add((TaskItem)item);
} else if(item instanceof EventItem){
eventList.add((EventItem)item);
} else{
toDoItemList.add(item);
}
try {
DataFileReadWrite.writeListToFile(mainList, "output.txt");
} catch (IOException e) {
}
return item;
}
public ToDoItem[] updateItem(String id, String description, int priority, MyDate start, MyDate end) {
assert id!=null && !id.isEmpty();
ToDoItem[] list = new ToDoItem[2];
ToDoItem target = searchTree.get(id);
if(target==null){
throw new IllegalArgumentException("No such ID");
}
//Makes a copy of the current version of the object
if(target instanceof TaskItem){
list[0] = new TaskItem((TaskItem)target);
} else if(target instanceof EventItem){
list[0] = new EventItem((EventItem)target);
} else{
list[0] = new ToDoItem(target);
}
if(description!=null && !description.isEmpty()){
target.setDescription(description);
}
if(ToDoItem.isValidPriority(priority)){
target.setPriority(priority);
}
if(start!=null && !(target instanceof EventItem)){
target = new EventItem(target, start);
removeItem(target.getId());
addItem(target);
} else if(start!=null){
((EventItem)target).setStartDate(start);
}
if(end!=null){
if(target instanceof EventItem){
((EventItem)target).setEndDate(end);
} else if(target instanceof TaskItem){
((TaskItem)target).setDeadline(end);
} else{
target = new TaskItem(target, end);
removeItem(target.getId());
addItem(target);
}
}
list[1] = target;
try {
DataFileReadWrite.writeListToFile(mainList, "output.txt");
} catch (IOException e) {
}
return list;
}
/**
* Gets a list of ToDoItem objects whose description contains this keyword.
* @param keyword The keyword to search in the objects.
* @return An array of ToDoItem objects containing all of the given values or null
* if the list is empty.
*/
public ToDoItem[] searchItems(String keyword, int priority, MyDate start, MyDate end, OptionType[] optList){
ArrayList<ToDoItem> temp = mainList;;
if(keyword!=null && !keyword.isEmpty()){
temp = searchByKeyword(temp, keyword);
}
if(priority!=-1){
temp = searchByPriority(temp, priority);
}
if(start!=null){
temp = searchByStartDate(temp, start);
}
if(end!=null){
temp = searchByEndDate(temp, end);
}
if(temp.isEmpty()){
return null;
}
if(optList!=null){
int size = optList.length;
while(size
sortListByOption(temp, optList[size]);
}
}
return temp.toArray(new ToDoItem[temp.size()]);
}
/**
* Gets a list of items with the keyword in their description from the given list.
* @param list The list to perform search on.
* @return An ArrayList of ToDoItem objects whose description contain the keyword.
*/
private ArrayList<ToDoItem> searchByKeyword(ArrayList<ToDoItem> list, String keyword){
assert list!=null && keyword!=null && !keyword.isEmpty();
ArrayList<ToDoItem> temp = new ArrayList<ToDoItem>();
for(ToDoItem i: list){
if(i.getDescription().contains(keyword)){
temp.add(i);
}
}
return temp;
}
/**
* Gets a list of items with the given priority from the given list.
* @param list The list to perform search on.
* @param priority The priority value used to filter the items.
* @return An Arraylist of ToDoItem objects with the given priority level.
*/
private ArrayList<ToDoItem> searchByPriority(ArrayList<ToDoItem> list, int priority){
assert list!=null && priority>=0;
ArrayList<ToDoItem> temp = new ArrayList<ToDoItem>();
for(ToDoItem i: list){
if(i.getPriority()==priority){
temp.add(i);
}
}
return temp;
}
/**
* Gets a list of items with the given start date drom the given list.
* @param list The list to perform search on.
* @param start The start date value to search.
* @return An ArrayList of ToDoItem objects with the given start date.
*/
private ArrayList<ToDoItem> searchByStartDate(ArrayList<ToDoItem> list, MyDate start){
assert start!=null && list!=null;
ArrayList<ToDoItem> temp = new ArrayList<>();
for(ToDoItem i: list){
if((i instanceof EventItem) && ((EventItem)i).getStartDate().equals(start)){
temp.add(i);
}
}
return temp;
}
/**
* Gets a list of items with the given end date/deadline.
* @param list The list to perform search on.
* @param end The end date/deadline value to search.
*/
private ArrayList<ToDoItem> searchByEndDate(ArrayList<ToDoItem> list, MyDate end){
assert list!=null && end!=null;
ArrayList<ToDoItem> temp = new ArrayList<ToDoItem>();
for(ToDoItem i: list){
if(((i instanceof EventItem) && ((EventItem)i).getEndDate().equals(end)) ||
((i instanceof TaskItem) && ((TaskItem)i).getDeadline().equals(end))){
temp.add(i);
}
}
return temp;
}
/**
* Gets the list of tasks and events in an array sorted according to optionsList.
* @param optionsList The list of data fields to sort the list by in descending order of precedence
* or null if no other sorting criteria is required.
* Eg. {a, b} means most importantly, sort by a. For all items with same value of a, sort by b.
* @return An array of ToDoItem objects sorted according to sorting criteria or null
* if the storage has no item.
*/
public ToDoItem[] getList(OptionType[] optionsList){
if(mainList.isEmpty()){
return null;
}
Collections.sort(mainList);
if(optionsList!=null){
int size = optionsList.length;
while(size
sortListByOption(mainList, optionsList[size]);
}
}
return mainList.toArray(new ToDoItem[mainList.size()]);
}
/**
* Sorts the given list by the given option type.
* @param list The list to sort.
* @optType The option to sort the list by.
*/
private void sortListByOption(ArrayList<ToDoItem> list, OptionType optType){
switch(optType){
case PRIORITY: Collections.sort(list, Collections.reverseOrder(new PriorityComparator()));
break;
case START_TIME: Collections.sort(list, new StartDateComparator());
break;
case END_TIME: Collections.sort(list, new EndDateComparator());
break;
default: break;
}
}
/**
* Returns a list of events sorted in lexicographical order of their ID.
* @return An array of EventItem objects sorted in lexicographical order of their ID
* or null if the list is empty.
*/
public EventItem[] getEvents(){
if(eventList.isEmpty()){
return null;
}
return eventList.toArray(new EventItem[eventList.size()]);
}
/**
* Returns a list of deadline tasks in lexicographical order or their ID.
* @return An array of TaskItem objects sorted in lexicographical order of their ID
* or null if the list is empty.
*/
public TaskItem[] getTasks(){
if(taskList.isEmpty()){
return null;
}
return taskList.toArray(new TaskItem[taskList.size()]);
}
/**
* Gets the list of events sorted in ascending order of start date.
* @return An array of EventItem sorted in ascending order by start date
* or null if the list is empty.
* Implementation is currently only a stub, to be properly implemented when use case requirements
* are confirmed.
*/
public EventItem[] getEventsSortedByStartDate(){
if(eventList.isEmpty()){
return null;
}
Collections.sort(eventList);
return eventList.toArray(new EventItem[eventList.size()]);
}
/**
* Gets the list of events sorted in ascending order of end date.
* @return An array of EventItem sorted in ascending order by start date
* or null if the list is empty.
* Implementation is currently only a stub, to be properly implemented when use case requirements
* are confirmed.
*/
public EventItem[] getEventsSortedByEndDate(){
if(eventList.isEmpty()){
return null;
}
Collections.sort(eventList);
return eventList.toArray(new EventItem[eventList.size()]);
}
/**
* Gets the list of deadline tasks sorted in ascending order of their deadlines.
* @return An array of TaskItem objects sorted in ascending order of their deadlines.
* Implementation is currently only a stub, to be properly implemented when use case requirements
* are confirmed.
*/
public TaskItem[] getTasksSortedByDeadline(){
if(taskList.isEmpty()){
return null;
}
Collections.sort(taskList);
return taskList.toArray(new TaskItem[taskList.size()]);
}
public ToDoItem removeItem(String id){
assert id!=null && !id.isEmpty();
ToDoItem temp = searchTree.remove(id);
if(temp==null){
throw new IllegalArgumentException("No such ID.");
}
mainList.remove(temp);
if(temp instanceof TaskItem){
taskList.remove((TaskItem)temp);
} else if(temp instanceof EventItem){
eventList.remove((EventItem)temp);
} else{
toDoItemList.remove(temp);
}
try {
DataFileReadWrite.writeListToFile(mainList, "output.txt");
} catch (IOException e) {
}
return temp;
}
public ToDoItem completeItem(String id){
ToDoItem item = removeItem(id);
if(item!=null){
if(item instanceof EventItem){
completedList.push(new EventItem((EventItem)item));
} else if(item instanceof TaskItem){
completedList.push(new TaskItem((TaskItem)item));
} else{
completedList.push(new ToDoItem(item));
}
item.setStatus(ToDoItem.STATUS_COMPLETED);
}
return item;
}
/**
* Unmark the most recent item that is marked completed.
* @return The ToDoItem object that is unmarked from completed list.
*/
public ToDoItem undoComplete(){
ToDoItem temp = completedList.pop();
addItem(temp);
return temp;
}
}
|
package org.umlg.sqlg.test.gremlincompile;
import org.apache.tinkerpop.gremlin.AbstractGremlinTest;
import org.apache.tinkerpop.gremlin.process.traversal.Path;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.__;
import org.apache.tinkerpop.gremlin.process.traversal.step.util.MapHelper;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.apache.tinkerpop.gremlin.structure.io.GraphReader;
import org.apache.tinkerpop.gremlin.structure.io.gryo.GryoIo;
import org.apache.tinkerpop.gremlin.structure.io.gryo.GryoReader;
import org.junit.Assert;
import org.junit.Test;
import org.umlg.sqlg.test.BaseTest;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class TestRepeatStep extends BaseTest {
// @Test
// //This is not optimized
// public void testUntilRepeat() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// this.sqlgGraph.tx().commit();
// List<Vertex> t = this.sqlgGraph.traversal().V().repeat(__.out()).until(__.hasLabel("C")).toList();
// Assert.assertEquals(6, t.size());
// Assert.assertTrue(t.remove(c1));
// Assert.assertTrue(t.remove(c1));
// Assert.assertTrue(t.remove(c2));
// Assert.assertTrue(t.remove(c2));
// Assert.assertTrue(t.remove(c3));
// Assert.assertTrue(t.remove(c3));
// Assert.assertTrue(t.isEmpty());
// t = this.sqlgGraph.traversal().V().until(__.hasLabel("C")).repeat(__.out()).toList();
// Assert.assertEquals(9, t.size());
// Assert.assertTrue(t.remove(c1));
// Assert.assertTrue(t.remove(c1));
// Assert.assertTrue(t.remove(c1));
// Assert.assertTrue(t.remove(c2));
// Assert.assertTrue(t.remove(c2));
// Assert.assertTrue(t.remove(c2));
// Assert.assertTrue(t.remove(c3));
// Assert.assertTrue(t.remove(c3));
// Assert.assertTrue(t.remove(c3));
// Assert.assertTrue(t.isEmpty());
// @Test
// //This is not optimized
// public void testRepeatWithUnoptimizableInternalSteps() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// this.sqlgGraph.tx().commit();
// List<Map<String, Vertex>> t = this.sqlgGraph.traversal().V().repeat(__.groupCount("m").by("name").out()).times(2).<Map<String, Vertex>>cap("m").toList();
// Assert.assertEquals(1, t.size());
// Assert.assertTrue(t.get(0).containsKey("a1"));
// Assert.assertTrue(t.get(0).containsKey("b1"));
// Assert.assertTrue(t.get(0).containsKey("b1"));
// Assert.assertTrue(t.get(0).containsKey("b1"));
// Assert.assertTrue(t.get(0).containsKey("c1"));
// Assert.assertTrue(t.get(0).containsKey("c1"));
// Assert.assertTrue(t.get(0).containsKey("c1"));
// Assert.assertEquals(1l, t.get(0).get("a1"));
// Assert.assertEquals(2l, t.get(0).get("b1"));
// Assert.assertEquals(2l, t.get(0).get("b2"));
// Assert.assertEquals(2l, t.get(0).get("b3"));
// Assert.assertEquals(2l, t.get(0).get("c1"));
// Assert.assertEquals(2l, t.get(0).get("c2"));
// Assert.assertEquals(2l, t.get(0).get("c3"));
// @Test
// //This is not optimized
// public void testRepeatNoLimit() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().repeat(__.out("ab").out("bc")).toList();
// Assert.assertTrue(vertices.isEmpty());
// @Test
// public void testRepeat() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().hasLabel("A").repeat(__.out("ab").out("bc")).times(1).toList();
// Assert.assertEquals(3, vertices.size());
// Assert.assertTrue(vertices.contains(c1));
// Assert.assertTrue(vertices.contains(c2));
// Assert.assertTrue(vertices.contains(c3));
// vertices = this.sqlgGraph.traversal().V().hasLabel("A").repeat(__.out("ab", "bc")).times(2).toList();
// Assert.assertEquals(3, vertices.size());
// Assert.assertTrue(vertices.contains(c1));
// Assert.assertTrue(vertices.contains(c2));
// Assert.assertTrue(vertices.contains(c3));
//// GraphTraversal<Vertex, Vertex> gt = this.sqlgGraph.traversal().V().hasLabel("A").repeat(__.out("ab", "bc")).emit().times(2);
//// GraphTraversal<Vertex, Vertex> gt = this.sqlgGraph.traversal().V().hasLabel("A").repeat(__.out("ab", "bc")).times(2).emit();
//// GraphTraversal<Vertex, Vertex> gt = this.sqlgGraph.traversal().V().hasLabel("A").emit().repeat(__.out("ab", "bc")).times(2);
// @Test
// public void testRepeatWithEmitFirst() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "name", "d1");
// Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "name", "d2");
// Vertex d3 = this.sqlgGraph.addVertex(T.label, "D", "name", "d3");
// c1.addEdge("cd", d1);
// c1.addEdge("cd", d2);
// c1.addEdge("cd", d3);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().hasLabel("A").emit().repeat(__.out("ab", "bc", "cd")).times(3).toList();
// for (Vertex vertex : vertices) {
// System.out.println(vertex.value("name").toString());
// Assert.assertEquals(14, vertices.size());
// Assert.assertTrue(vertices.remove(a1));
// Assert.assertTrue(vertices.remove(b1));
// Assert.assertTrue(vertices.remove(b2));
// Assert.assertTrue(vertices.remove(b3));
// Assert.assertTrue(vertices.remove(c1));
// Assert.assertTrue(vertices.remove(c1));
// Assert.assertTrue(vertices.remove(c2));
// Assert.assertTrue(vertices.remove(c3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.isEmpty());
// @Test
// public void testRepeatWithEmitFirstPath() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "name", "d1");
// Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "name", "d2");
// Vertex d3 = this.sqlgGraph.addVertex(T.label, "D", "name", "d3");
// c1.addEdge("cd", d1);
// c1.addEdge("cd", d2);
// c1.addEdge("cd", d3);
// this.sqlgGraph.tx().commit();
// List<Path> paths = this.sqlgGraph.traversal().V().hasLabel("A").emit().repeat(__.out("ab", "bc", "cd")).times(3).path().toList();
// Assert.assertEquals(14, paths.size());
// for (Path path : paths) {
// System.out.println(path);
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 1 && p.get(0).equals(a1)));
// paths.remove(paths.stream().filter(p -> p.size() == 1 && p.get(0).equals(a1)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 2 && p.get(0).equals(a1) && p.get(1).equals(b1)));
// paths.remove(paths.stream().filter(p -> p.size() == 2 && p.get(0).equals(a1) && p.get(1).equals(b1)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 2 && p.get(0).equals(a1) && p.get(1).equals(b2)));
// paths.remove(paths.stream().filter(p -> p.size() == 2 && p.get(0).equals(a1) && p.get(1).equals(b2)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 2 && p.get(0).equals(a1) && p.get(1).equals(b3)));
// paths.remove(paths.stream().filter(p -> p.size() == 2 && p.get(0).equals(a1) && p.get(1).equals(b3)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 3 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1)));
// paths.remove(paths.stream().filter(p -> p.size() == 3 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 3 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1)));
// paths.remove(paths.stream().filter(p -> p.size() == 3 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 3 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c2)));
// paths.remove(paths.stream().filter(p -> p.size() == 3 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c2)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 3 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c3)));
// paths.remove(paths.stream().filter(p -> p.size() == 3 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c3)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d1)));
// paths.remove(paths.stream().filter(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d1)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d1)));
// paths.remove(paths.stream().filter(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d1)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d2)));
// paths.remove(paths.stream().filter(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d2)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d2)));
// paths.remove(paths.stream().filter(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d2)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d3)));
// paths.remove(paths.stream().filter(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d3)).findAny().get());
// Assert.assertTrue(paths.stream().anyMatch(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d3)));
// paths.remove(paths.stream().filter(p -> p.size() == 4 && p.get(0).equals(a1) && p.get(1).equals(b1) && p.get(2).equals(c1) && p.get(3).equals(d3)).findAny().get());
// Assert.assertTrue(paths.isEmpty());
// @Test
// public void testRepeatWithEmitLast() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "name", "d1");
// Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "name", "d2");
// Vertex d3 = this.sqlgGraph.addVertex(T.label, "D", "name", "d3");
// c1.addEdge("cd", d1);
// c1.addEdge("cd", d2);
// c1.addEdge("cd", d3);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().hasLabel("A").repeat(__.out("ab", "bc", "cd")).emit().times(3).toList();
// for (Vertex vertex : vertices) {
// System.out.println(vertex.value("name").toString());
// Assert.assertEquals(13, vertices.size());
// Assert.assertTrue(vertices.remove(b1));
// Assert.assertTrue(vertices.remove(b2));
// Assert.assertTrue(vertices.remove(b3));
// Assert.assertTrue(vertices.remove(c1));
// Assert.assertTrue(vertices.remove(c1));
// Assert.assertTrue(vertices.remove(c2));
// Assert.assertTrue(vertices.remove(c3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.isEmpty());
// @Test
// public void testRepeatWithEmitLastShouldNotLeftJoinFirstDegree() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex a2 = this.sqlgGraph.addVertex(T.label, "A", "name", "a2");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "name", "d1");
// Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "name", "d2");
// Vertex d3 = this.sqlgGraph.addVertex(T.label, "D", "name", "d3");
// c1.addEdge("cd", d1);
// c1.addEdge("cd", d2);
// c1.addEdge("cd", d3);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().hasLabel("A").repeat(__.out("ab", "bc", "cd")).emit().times(3).toList();
// for (Vertex vertex : vertices) {
// System.out.println(vertex.value("name").toString());
// Assert.assertEquals(13, vertices.size());
// Assert.assertTrue(vertices.remove(b1));
// Assert.assertTrue(vertices.remove(b2));
// Assert.assertTrue(vertices.remove(b3));
// Assert.assertTrue(vertices.remove(c1));
// Assert.assertTrue(vertices.remove(c1));
// Assert.assertTrue(vertices.remove(c2));
// Assert.assertTrue(vertices.remove(c3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.isEmpty());
// @Test
// public void testRepeatWithEmitLastWithTimesFirst() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// Vertex b3 = this.sqlgGraph.addVertex(T.label, "B", "name", "b3");
// a1.addEdge("ab", b1);
// a1.addEdge("ab", b2);
// a1.addEdge("ab", b3);
// Vertex c1 = this.sqlgGraph.addVertex(T.label, "C", "name", "c1");
// Vertex c2 = this.sqlgGraph.addVertex(T.label, "C", "name", "c2");
// Vertex c3 = this.sqlgGraph.addVertex(T.label, "C", "name", "c3");
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c1);
// b1.addEdge("bc", c2);
// b1.addEdge("bc", c3);
// Vertex d1 = this.sqlgGraph.addVertex(T.label, "D", "name", "d1");
// Vertex d2 = this.sqlgGraph.addVertex(T.label, "D", "name", "d2");
// Vertex d3 = this.sqlgGraph.addVertex(T.label, "D", "name", "d3");
// c1.addEdge("cd", d1);
// c1.addEdge("cd", d2);
// c1.addEdge("cd", d3);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().hasLabel("A").times(3).repeat(__.out("ab", "bc", "cd")).emit().toList();
// for (Vertex vertex : vertices) {
// System.out.println(vertex.value("name").toString());
// Assert.assertEquals(19, vertices.size());
// Assert.assertTrue(vertices.remove(b1));
// Assert.assertTrue(vertices.remove(b2));
// Assert.assertTrue(vertices.remove(b3));
// Assert.assertTrue(vertices.remove(c1));
// Assert.assertTrue(vertices.remove(c1));
// Assert.assertTrue(vertices.remove(c2));
// Assert.assertTrue(vertices.remove(c3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.remove(d1));
// Assert.assertTrue(vertices.remove(d2));
// Assert.assertTrue(vertices.remove(d3));
// Assert.assertTrue(vertices.isEmpty());
@Test
public void g_V_repeatXoutX_timesX2X_emit_path() throws IOException {
    // Load the TinkerPop "modern" sample graph from the bundled Gryo resource
    // into the Sqlg-backed graph under test.
    Graph graph = this.sqlgGraph;
    final GraphReader reader = GryoReader.build()
            .mapper(graph.io(GryoIo.build()).mapper().create())
            .create();
    try (final InputStream stream = AbstractGremlinTest.class.getResourceAsStream("/tinkerpop-modern.kryo")) {
        reader.readGraph(stream, graph);
    }
    assertModernGraph(graph, true, false);
    GraphTraversalSource g = graph.traversal();
    final List<Traversal<Vertex, Path>> traversals = new ArrayList<>();
    // Parameterized type instead of the raw Traversal to avoid unchecked warnings.
    Traversal<Vertex, Path> t = g.V().repeat(__.out()).times(2).emit().path();
    traversals.add(t);
    traversals.forEach(traversal -> {
        printTraversalForm(traversal);
        // Histogram: path length -> number of emitted paths of that length.
        final Map<Integer, Long> pathLengths = new HashMap<>();
        int counter = 0;
        while (traversal.hasNext()) {
            counter++;
            MapHelper.incr(pathLengths, traversal.next().size(), 1L);
        }
        // emit() after times(2) yields paths of length 2 and 3 on the modern graph:
        // 6 one-hop paths and 2 two-hop paths, 8 in total.
        Assert.assertEquals(2, pathLengths.size());
        Assert.assertEquals(8, counter);
        // Long.valueOf over the deprecated Long(long) constructor.
        Assert.assertEquals(Long.valueOf(6), pathLengths.get(2));
        Assert.assertEquals(Long.valueOf(2), pathLengths.get(3));
    });
}
// @Test
// public void testDuplicatePath() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex a2 = this.sqlgGraph.addVertex(T.label, "A", "name", "a2");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// Vertex b2 = this.sqlgGraph.addVertex(T.label, "B", "name", "b2");
// a1.addEdge("ab", b1);
// b1.addEdge("ba", a2);
// a2.addEdge("ab", b2);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().repeat(__.out("ab", "ba")).times(2).emit().toList();
// @Test
// public void testOnLeftJoinOnLeaveNode() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex a2 = this.sqlgGraph.addVertex(T.label, "A", "name", "a2");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// a1.addEdge("ab", b1);
// a2.addEdge("ab", b1);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().hasLabel("A").emit().repeat(__.out("ab")).times(1).toList();
// Assert.assertEquals(4, vertices.size());
// @Test
// public void testOnDuplicatePaths() {
// Vertex a1 = this.sqlgGraph.addVertex(T.label, "A", "name", "a1");
// Vertex a2 = this.sqlgGraph.addVertex(T.label, "A", "name", "a2");
// Vertex b1 = this.sqlgGraph.addVertex(T.label, "B", "name", "b1");
// a1.addEdge("ab", b1);
// b1.addEdge("ba", a2);
// this.sqlgGraph.tx().commit();
// List<Vertex> vertices = this.sqlgGraph.traversal().V().hasLabel("A").emit().repeat(__.out("ab", "ba")).times(2).toList();
// Assert.assertEquals(4, vertices.size());
}
|
package de.bastiankrol.startexplorer.preferences;
import static de.bastiankrol.startexplorer.util.Util.*;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.jface.preference.PreferencePage;
import org.eclipse.jface.preference.PreferenceStore;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.layout.RowLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPreferencePage;
import de.bastiankrol.startexplorer.Activator;
import de.bastiankrol.startexplorer.preferences.SeparatorData.SeparatorType;
public class StartExplorerPreferencePage extends PreferencePage implements
IWorkbenchPreferencePage
{
private static final int MAX_COLUMN_WIDTH = 300;
private PreferenceModel preferenceModel;
private PreferenceUtil preferenceUtil;
private Composite parent;
private Table tableCommands;
private Text textCustomCopyResourceSeparatorString;
private Button radioButtonUnixLinebreak;
private Button radioButtonWindowsLinebreak;
private Button radioButtonMacLinebreak;
private Button radioButtonTab;
private Button radioButtonCustomSeparator;
/**
* {@inheritDoc}
*
* @see org.eclipse.ui.IWorkbenchPreferencePage#init(org.eclipse.ui.IWorkbench)
*/
public void init(IWorkbench workbench)
{
if (false)
{
workbench.getClass();
}
this.preferenceModel = new PreferenceModel();
this.preferenceUtil = new PreferenceUtil();
}
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.preference.PreferencePage#doGetPreferenceStore()
*/
@Override
protected IPreferenceStore doGetPreferenceStore()
{
return this.getPreferenceUtil().retrievePreferenceStore();
}
PreferenceUtil getPreferenceUtil()
{
return this.preferenceUtil;
}
/**
* Initializes the preference model by loading the stored preferences from the
* preference store.
*/
private void initializeValues()
{
this.preferenceModel.loadPreferencesFromStore(this.getPreferenceUtil());
this.refreshViewFromModel();
}
/**
* Store values to preference store
*/
private void storeValues()
{
IPreferenceStore store = getPreferenceStore();
this.storeValues(store);
}
/**
* Store the values to <code>store</code>.
*
* @param store the {@link IPreferenceStore} to store the preferences in.
*/
protected void storeValues(IPreferenceStore store)
{
this.preferenceModel.storeValues(store);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.preference.PreferencePage#performDefaults()
*/
@Override
protected void performDefaults()
{
super.performDefaults();
this.initializeDefaults();
}
/**
* Initializes the preference model with the defaults, if no preferences have
* been set by the user yet.
*/
protected void initializeDefaults()
{
this.preferenceModel.initializeFromDefaults();
this.refreshViewFromModel();
}
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.preference.PreferencePage#performOk()
*/
@Override
public boolean performOk()
{
this.storeValues();
Activator.getDefault().savePluginPreferences();
return true;
}
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.preference.PreferencePage#createContents(org.eclipse.swt.widgets.Composite)
*/
@Override
protected Control createContents(Composite parent)
{
// ?: Do we need to create a new composite as a child of parent and return
// this new composite instead of manipulating parent directly?
// do basic layout for preference page
this.parent = parent;
GridLayout gridLayoutParent = new GridLayout();
gridLayoutParent.numColumns = 2;
this.parent.setLayout(gridLayoutParent);
// Upper left part: command config table
this.tableCommands = this.createTable(this.parent);
// Upper right part: buttons to control the command config table (add, edit,
// delete, up, down, ...)
this.createControlButtonSection();
// Lower part: section for configurable separator for the copy resource path
// command
this.createCopyResourcePathSeparatorSection();
// fetch models from value and put them into the gui elements
this.initializeValues();
return this.parent;
}
/**
* Creates the table for the preference page.
*
* @param parent parent composite, the table will be a child of
* <code>parent</code>
* @return
*/
private Table createTable(Composite parent)
{
String[] titles = { "Command", "Name/Resources", "Name/Text Selection" };
Table table =
new Table(parent, SWT.MULTI | SWT.BORDER | SWT.FULL_SELECTION);
GridData data = new GridData(SWT.FILL, SWT.FILL, true, true);
table.setLayoutData(data);
table.setLinesVisible(true);
table.setHeaderVisible(true);
for (int i = 0; i < titles.length; i++)
{
TableColumn column = new TableColumn(table, SWT.NONE);
column.setText(titles[i]);
}
return table;
}
private void createControlButtonSection()
{
Composite compositeButtonColumn = new Composite(this.parent, SWT.NONE);
compositeButtonColumn.setLayoutData(new GridData(
GridData.VERTICAL_ALIGN_BEGINNING));
RowLayout rowLayoutButtonColumn = new RowLayout();
rowLayoutButtonColumn.type = SWT.VERTICAL;
rowLayoutButtonColumn.pack = false;
compositeButtonColumn.setLayout(rowLayoutButtonColumn);
createButton(compositeButtonColumn, "Add").addSelectionListener(
new EventlessSelectionAdapter()
{
public void widgetSelected()
{
StartExplorerPreferencePage.this.buttonAddPressed();
}
});
createButton(compositeButtonColumn, "Edit").addSelectionListener(
new EventlessSelectionAdapter()
{
public void widgetSelected()
{
StartExplorerPreferencePage.this.buttonEditPressed();
}
});
createButton(compositeButtonColumn, "Remove").addSelectionListener(
new EventlessSelectionAdapter()
{
public void widgetSelected()
{
StartExplorerPreferencePage.this.buttonRemovePressed();
}
});
createButton(compositeButtonColumn, "Up").addSelectionListener(
new EventlessSelectionAdapter()
{
public void widgetSelected()
{
StartExplorerPreferencePage.this.buttonUpPressed();
}
});
createButton(compositeButtonColumn, "Down").addSelectionListener(
new EventlessSelectionAdapter()
{
public void widgetSelected()
{
StartExplorerPreferencePage.this.buttonDownPressed();
}
});
}
private void createCopyResourcePathSeparatorSection()
{
Label labelSeparator =
new Label(this.parent, SWT.SEPARATOR | SWT.HORIZONTAL);
GridData gridDataSeparator = new GridData(SWT.FILL, SWT.FILL, true, false);
gridDataSeparator.horizontalSpan = 2;
labelSeparator.setLayoutData(gridDataSeparator);
Composite compositeCopyResourcePathSeparator =
new Composite(this.parent, SWT.NULL);
GridData gridDataComposite = new GridData(SWT.FILL, SWT.FILL, true, false);
gridDataComposite.horizontalSpan = 2;
compositeCopyResourcePathSeparator.setLayoutData(gridDataComposite);
GridLayout gridLayoutComposite = new GridLayout();
gridLayoutComposite.numColumns = 2;
compositeCopyResourcePathSeparator.setLayout(gridLayoutComposite);
Label labelHeadline =
new Label(compositeCopyResourcePathSeparator, SWT.NONE);
labelHeadline.setText("Separator for \"Copy resource path to clipboard\"");
GridData gridDataLabelHeadline =
new GridData(SWT.FILL, SWT.FILL, true, false);
gridDataLabelHeadline.horizontalSpan = 2;
labelHeadline.setLayoutData(gridDataLabelHeadline);
this.radioButtonUnixLinebreak =
new Button(compositeCopyResourcePathSeparator, SWT.RADIO);
this.radioButtonUnixLinebreak.setText("Unix Linebreak (LF)");
this.radioButtonUnixLinebreak.addSelectionListener(new SelectionAdapter()
{
@Override
public void widgetSelected(SelectionEvent e)
{
preferenceModel.getSeparatorData().setSeparatorType(SeparatorType.LF);
refreshSeparatorStuffFromModel();
}
});
this.radioButtonWindowsLinebreak =
new Button(compositeCopyResourcePathSeparator, SWT.RADIO);
this.radioButtonWindowsLinebreak.setText("Windows Linebreak (CR+LF)");
this.radioButtonWindowsLinebreak
.addSelectionListener(new SelectionAdapter()
{
@Override
public void widgetSelected(SelectionEvent e)
{
preferenceModel.getSeparatorData().setSeparatorType(
SeparatorType.CRLF);
refreshSeparatorStuffFromModel();
}
});
this.radioButtonMacLinebreak =
new Button(compositeCopyResourcePathSeparator, SWT.RADIO);
this.radioButtonMacLinebreak.setText("Mac Linebreak (CR)");
this.radioButtonMacLinebreak.addSelectionListener(new SelectionAdapter()
{
@Override
public void widgetSelected(SelectionEvent e)
{
preferenceModel.getSeparatorData().setSeparatorType(SeparatorType.CR);
refreshSeparatorStuffFromModel();
}
});
this.radioButtonTab =
new Button(compositeCopyResourcePathSeparator, SWT.RADIO);
this.radioButtonTab.setText("Tab");
this.radioButtonTab.addSelectionListener(new SelectionAdapter()
{
@Override
public void widgetSelected(SelectionEvent e)
{
preferenceModel.getSeparatorData().setSeparatorType(SeparatorType.TAB);
refreshSeparatorStuffFromModel();
}
});
this.radioButtonCustomSeparator =
new Button(compositeCopyResourcePathSeparator, SWT.RADIO);
this.radioButtonCustomSeparator.setText("Custom String: ");
this.radioButtonCustomSeparator.addSelectionListener(new SelectionAdapter()
{
@Override
public void widgetSelected(SelectionEvent e)
{
preferenceModel.getSeparatorData().setSeparatorType(
SeparatorType.CUSTOM);
refreshSeparatorStuffFromModel();
}
});
this.textCustomCopyResourceSeparatorString =
new Text(compositeCopyResourcePathSeparator, SWT.SINGLE | SWT.BORDER);
GridData layoutDataText = new GridData();
layoutDataText.widthHint = 50;
this.textCustomCopyResourceSeparatorString.setLayoutData(layoutDataText);
this.textCustomCopyResourceSeparatorString
.addModifyListener(new ModifyListener()
{
public void modifyText(ModifyEvent event)
{
String content = textCustomCopyResourceSeparatorString.getText();
preferenceModel.getSeparatorData()
.setCustomSeparatorString(content);
}
});
}
/**
* Refreshes the page from the preference model
*/
private void refreshViewFromModel()
{
this.tableCommands.removeAll();
for (CommandConfig commandConfig : this.preferenceModel
.getCommandConfigList())
{
TableItem item = new TableItem(this.tableCommands, SWT.NONE);
item.setText(0, commandConfig.getCommand());
item.setText(1, commandConfig.getNameForResourcesMenu());
if (!commandConfig.isEnabledForResourcesMenu())
{
item.setForeground(1, Display.getCurrent().getSystemColor(
SWT.COLOR_GRAY));
}
item.setText(2, commandConfig.getNameForTextSelectionMenu());
if (!commandConfig.isEnabledForTextSelectionMenu())
{
item.setForeground(2, Display.getCurrent().getSystemColor(
SWT.COLOR_GRAY));
}
}
for (int i = 0; i < this.tableCommands.getColumnCount(); i++)
{
TableColumn column = this.tableCommands.getColumn(i);
column.pack();
if (column.getWidth() > MAX_COLUMN_WIDTH)
{
column.setWidth(MAX_COLUMN_WIDTH);
}
}
this.refreshSeparatorStuffFromModel();
}
private void refreshSeparatorStuffFromModel()
{
this.radioButtonUnixLinebreak.setSelection(false);
this.radioButtonWindowsLinebreak.setSelection(false);
this.radioButtonMacLinebreak.setSelection(false);
this.radioButtonTab.setSelection(false);
this.radioButtonCustomSeparator.setSelection(false);
SeparatorType copyResourceSeparator =
this.preferenceModel.getSeparatorData().getSeparatorType();
switch (copyResourceSeparator)
{
case CR:
this.radioButtonMacLinebreak.setSelection(true);
this.textCustomCopyResourceSeparatorString.setEnabled(false);
break;
case CRLF:
this.radioButtonWindowsLinebreak.setSelection(true);
this.textCustomCopyResourceSeparatorString.setEnabled(false);
break;
case LF:
this.radioButtonUnixLinebreak.setSelection(true);
this.textCustomCopyResourceSeparatorString.setEnabled(false);
break;
case TAB:
this.radioButtonTab.setSelection(true);
this.textCustomCopyResourceSeparatorString.setEnabled(false);
break;
case CUSTOM:
this.radioButtonCustomSeparator.setSelection(true);
this.textCustomCopyResourceSeparatorString.setEnabled(true);
break;
}
String customCopyResourceSeparatorString =
this.preferenceModel.getSeparatorData().getCustomSeparatorString();
// Won't happen when running in Eclipse, just for testing
if (customCopyResourceSeparatorString == null)
{
customCopyResourceSeparatorString = "";
}
this.textCustomCopyResourceSeparatorString
.setText(customCopyResourceSeparatorString);
}
/**
* {@inheritDoc}
*
* @see org.eclipse.jface.preference.PreferencePage#doComputeSize()
*/
@Override
public Point doComputeSize()
{
return super.doComputeSize();
}
private Button createButton(Composite parent, String label)
{
Button button = new Button(parent, SWT.PUSH);
button.setText(label);
return button;
}
private void buttonAddPressed()
{
new EditCommandConfigPane(this.parent.getShell(), this.preferenceModel
.getCommandConfigList()).open();
this.refreshViewFromModel();
}
private void buttonEditPressed()
{
int selectionIndex = this.tableCommands.getSelectionIndex();
if (selectionIndex != -1)
{
new EditCommandConfigPane(this.parent.getShell(), this.preferenceModel
.getCommandConfigList().get(selectionIndex)).open();
}
this.refreshViewFromModel();
}
private void buttonRemovePressed()
{
int[] selectionIndices = this.tableCommands.getSelectionIndices();
// remove multiple selected indices from end to start
for (int i = selectionIndices.length - 1; i >= 0; i
{
int selectedIndex = selectionIndices[i];
this.preferenceModel.getCommandConfigList().remove(selectedIndex);
}
this.refreshViewFromModel();
}
private void buttonUpPressed()
{
int[] selectionIndices = this.tableCommands.getSelectionIndices();
boolean changed =
moveUpInList(this.preferenceModel.getCommandConfigList(),
selectionIndices);
this.refreshViewFromModel();
if (changed)
{
for (int i = 0; i < selectionIndices.length; i++)
{
selectionIndices[i] -= 1;
}
}
this.tableCommands.setSelection(selectionIndices);
}
private void buttonDownPressed()
{
int[] selectionIndices = this.tableCommands.getSelectionIndices();
boolean changed =
moveDownInList(this.preferenceModel.getCommandConfigList(),
selectionIndices);
this.refreshViewFromModel();
if (changed)
{
for (int i = 0; i < selectionIndices.length; i++)
{
selectionIndices[i] += 1;
}
}
this.tableCommands.setSelection(selectionIndices);
}
private abstract class EventlessSelectionAdapter extends SelectionAdapter
{
/**
* {@inheritDoc}
*
* @see org.eclipse.swt.events.SelectionAdapter#widgetSelected(org.eclipse.swt.events.SelectionEvent)
*/
public final void widgetSelected(SelectionEvent event)
{
if (false)
{
event.getClass();
}
this.widgetSelected();
}
abstract void widgetSelected();
}
/**
* Just for testing the page layout.
*
* @param args ...
*/
public static void main(String[] args)
{
Display display = Display.getDefault();
final Shell shell = new Shell(display);
new StartExplorerPreferencePage()
{
{
this.init(null);
this.createContents(shell);
this.initializeDefaults();
}
protected IPreferenceStore doGetPreferenceStore()
{
IPreferenceStore preferenceStore = new PreferenceStore();
return preferenceStore;
}
@Override
PreferenceUtil getPreferenceUtil()
{
return new PreferenceUtil()
{
@Override
void loadPreferencesFromStoreIntoPreferenceModel(
PreferenceModel preferenceModel)
{
preferenceModel.getSeparatorData().setSeparatorData(
SeparatorType.LF, null);
}
};
}
};
shell.open();
while (!shell.isDisposed())
{
if (!display.readAndDispatch())
{
display.sleep();
}
}
}
}
|
package main.app.controllers;
import com.jfoenix.controls.JFXButton;
import com.jfoenix.controls.JFXSlider;
import com.jfoenix.controls.JFXTextField;
import javafx.fxml.FXML;
import javafx.scene.control.Label;
import javafx.scene.control.Tab;
import main.app.SerialCommunication;
import java.util.List;
// JavaFX controller for the "Silnik" (motor) tab. Fields annotated with @FXML
// are injected by the FXML loader by matching fx:id, so their names must not
// change. Serial communication with the JRK motor controller goes through
// SerialCommunication.
public class SilnikController {
// Injected UI controls (bound via fx:id in the FXML file).
@FXML
private Tab silnikTab;
@FXML
private Label actualPosLbl;
@FXML
private Label targetPosLbl;
@FXML
private Label choosePosLbl;
@FXML
private Label mvLbl;
@FXML
private JFXTextField actualPosField;
@FXML
private JFXTextField targetPosField;
@FXML
private JFXSlider posSlider;
@FXML
private JFXButton mvPlusBtn;
@FXML
private JFXButton mvMinusBtn;
@FXML
private JFXButton chngPosBtn;
// Serial link used to talk to the motor controller.
public SerialCommunication serialCommunication = new SerialCommunication();
// Command bytes of the Pololu JRK serial protocol. NOTE(review): the 'com'
// field is stored but never read in this class — presumably intended for a
// getter or for building serial frames; confirm before removing.
public enum JRKCommands {
JRK_GET_TARGET(0xA3),
JRK_GET_FEEDBACK(0xA7),
JRK_GET_STATUS(0xB3),
JRK_SET_TARGET(0xC0),
JRK_SET_MOTOROFF(0xFF);
private int com;
JRKCommands(int com){
this.com = com;
}
};
// Bit flags of the JRK status word; each constant is a single bit (or a
// reserved mask). The 'com' field is likewise stored but not yet used here.
public enum JRKStatus{
JRK_STATUS_AWAITING (0x0001),
JRK_STATUS_NOPOWER (0x0002),
JRK_STATUS_DRVERR (0x0004),
JRK_STATUS_INVINPUT (0x0008),
JRK_STATUS_DISCINPUT (0x0010),
JRK_STATUS_DISCFEEDBACK (0x0020),
JRK_STATUS_OVERCURRENT (0x0040),
JRK_STATUS_SERERROR (0x0080),
JRK_STATUS_SEROVERRUN (0x0100),
JRK_STATUS_RXOVERRUN (0x0200),
JRK_STATUS_CRCERR (0x0400),
JRK_STATUS_PROTOERR (0x0800),
JRK_STATUS_TIMEOUT (0x1000),
JRK_STATUS_RESERVED (0x7000);
private int com;
JRKStatus(int com){
this.com = com;
}
};
// Opens the serial connection to the device.
// NOTE(review): connects to the port at index 1 unconditionally — this
// assumes at least two ports exist and that the device is the second one;
// throws IndexOutOfBoundsException otherwise. Confirm the intended port
// selection strategy.
public void openCommunication(){
List<String> portNames;
portNames = serialCommunication.getPortNames();
serialCommunication.connect(portNames.get(1));
}
/* public String jrkStatusToText(int value){
String out = new String;
if (value & JRKStatus.JRK_STATUS_AWAITING)
out += "Awaiting command\n";
if (value&JRKStatus.JRK_STATUS_NOPOWER)
out+="No motor power\n";
if (value&JRKStatus.JRK_STATUS_DRVERR)
out+="Motor driver error\n";
if (value&JRKStatus.JRK_STATUS_INVINPUT)
out+="Invalid input\n";
if (value&JRKStatus.JRK_STATUS_DISCINPUT)
out+="Input cable disconnected\n";
if (value&JRKStatus.JRK_STATUS_DISCFEEDBACK)
out+="Feedback cable disconnected\n";
if (value&JRKStatus.JRK_STATUS_OVERCURRENT)
out+="Overcurrent\n";
if (value&(JRKStatus.JRK_STATUS_SERERROR|JRKStatus.JRK_STATUS_SEROVERRUN|JRKStatus.JRK_STATUS_RXOVERRUN|JRKStatus.JRK_STATUS_CRCERR|JRKStatus.JRK_STATUS_PROTOERR|JRKStatus.JRK_STATUS_TIMEOUT))
out+="Serial communication error\n";
return out;
}
*/
/*
public bool sendJRKcommand(int command, int parameter){
if(command == JRKCommands.JRK_SET_TARGET){
}
}*/
}
|
// Class shamelessy reused from minechem
//TODO Rename to lib, and change to HxC format and then remove above comment
package HxCKDMS.gkpk.data;
/**
 * Time-related constants expressed in Minecraft game ticks
 * (the game runs at a fixed 20 ticks per second).
 */
public class TimeConst {
    /** Number of game ticks in one real-time second. */
    public static final int TICKS_PER_SECOND = 20;
    /** Number of game ticks in one real-time minute. */
    public static final int TICKS_PER_MINUTE = TICKS_PER_SECOND * 60;
    /** Duration of one fermenter cycle, in ticks (10 seconds). */
    public static final int FERMENTER_TIME = TICKS_PER_SECOND * 10;
    /** Duration of one extractor cycle, in ticks (15 seconds). */
    public static final int EXTRACTOR_TIME = TICKS_PER_SECOND * 15;

    // Constants holder — not meant to be instantiated.
    private TimeConst() {
    }
}
|
package com.balancedpayments;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import com.balancedpayments.core.Resource;
import com.balancedpayments.core.ResourceCollection;
/**
 * A credit (payout) of funds to a bank account, as returned by the Balanced
 * Payments API. Only {@code meta} is client-writable (see {@link #serialize()});
 * all other fields are populated from API responses via {@link #deserialize(Map)}.
 */
public class Credit extends Resource {
    public Date created_at;
    public Integer amount;
    public String description;
    public String status;
    public BankAccount bank_account;
    public String account_uri;
    public Account account;
    public Map<String, String> meta;

    /** Paginated collection of credits. */
    public static class Collection extends ResourceCollection<Credit> {
        public Collection(String uri) {
            super(Credit.class, uri);
        }
    }

    /**
     * Serializes the fields that may be sent back to the API.
     * Only {@code meta} is included; everything else is server-controlled.
     */
    @Override
    public Map<String, Object> serialize() {
        Map<String, Object> payload = new HashMap<String, Object>();
        payload.put("meta", meta);
        return payload;
    }

    /**
     * Populates this credit from a deserialized API response payload.
     *
     * @param payload parsed JSON response as a map of field name to value.
     */
    @SuppressWarnings("unchecked")
    @Override
    public void deserialize(Map<String, Object> payload) {
        super.deserialize(payload);
        // The payload carries either an account URI or an embedded account
        // object; prefer the URI form when both keys could appear.
        if (payload.containsKey("account_uri")) {
            account = null;
            account_uri = (String) payload.get("account_uri");
        }
        else if (payload.containsKey("account") && payload.get("account") != null) {
            account = new Account((Map<String, Object>) payload.get("account"));
            account_uri = account.uri;
        }
        else {
            account = null;
            account_uri = null;
        }
        // NOTE(review): assumes "bank_account" is always present in the
        // payload — confirm against the API; a missing key passes null to the
        // BankAccount constructor.
        bank_account = new BankAccount((Map<String, Object>) payload.get("bank_account"));
        created_at = deserializeDate((String) payload.get("created_at"));
        // Cast via Number instead of Double: some JSON parsers deliver whole
        // numbers as Integer/Long, which would make a Double cast throw
        // ClassCastException. Behavior for Double values is unchanged.
        amount = ((Number) payload.get("amount")).intValue();
        description = (String) payload.get("description");
        status = (String) payload.get("status");
        meta = (Map<String, String>) payload.get("meta");
    }
}
|
package com.box.sdk;
import java.text.ParseException;
import java.util.Date;
import com.eclipsesource.json.JsonObject;
import com.eclipsesource.json.JsonValue;
/**
* The abstract base class for types that can be added to collaborations.
*/
/**
 * Abstract base for Box resources that may participate in collaborations
 * (for example users and groups).
 */
public abstract class BoxCollaborator extends BoxResource {

    /**
     * Creates a BoxCollaborator with the given ID.
     * @param api the API connection to be used by the collaborator.
     * @param id the ID of the collaborator.
     */
    public BoxCollaborator(BoxAPIConnection api, String id) {
        super(api, id);
    }

    /**
     * Holds extra metadata about a BoxCollaborator: its display name and its
     * creation/modification timestamps.
     *
     * @param <T> the type of the collaborator associated with this info.
     */
    public abstract class Info<T extends BoxCollaborator> extends BoxResource.Info<T> {
        private String name;
        private Date createdAt;
        private Date modifiedAt;

        /**
         * Creates an Info with no values set.
         */
        public Info() {
            super();
        }

        /**
         * Creates an Info whose values are parsed from a JSON string.
         * @param json the JSON string to parse.
         */
        public Info(String json) {
            super(json);
        }

        /**
         * Creates an Info whose values come from an already parsed JSON object.
         * @param jsonObject the parsed JSON object.
         */
        protected Info(JsonObject jsonObject) {
            super(jsonObject);
        }

        /**
         * Returns the collaborator's name.
         * @return the name of the collaborator.
         */
        public String getName() {
            return this.name;
        }

        /**
         * Updates the collaborator's name and records it as a pending change.
         * @param name the new name of the collaborator.
         */
        public void setName(String name) {
            this.name = name;
            this.addPendingChange("name", name);
        }

        /**
         * Returns when the collaborator was created.
         * @return the date that the collaborator was created.
         */
        public Date getCreatedAt() {
            return this.createdAt;
        }

        /**
         * Returns when the collaborator was last modified.
         * @return the date that the collaborator was modified.
         */
        public Date getModifiedAt() {
            return this.modifiedAt;
        }

        @Override
        protected void parseJSONMember(JsonObject.Member member) {
            super.parseJSONMember(member);
            // Pick out the members this class knows about; anything else was
            // already handled (or ignored) by the superclass.
            String memberName = member.getName();
            JsonValue memberValue = member.getValue();
            try {
                if (memberName.equals("name")) {
                    this.name = memberValue.asString();
                } else if (memberName.equals("created_at")) {
                    this.createdAt = BoxDateParser.parse(memberValue.asString());
                } else if (memberName.equals("modified_at")) {
                    this.modifiedAt = BoxDateParser.parse(memberValue.asString());
                }
            } catch (ParseException e) {
                assert false : "A ParseException indicates a bug in the SDK.";
            }
        }
    }
}
|
package com.bugsnag.android;
import android.app.Activity;
import android.content.Context;
import android.os.SystemClock;
import android.util.Log;
import com.bugsnag.MetaData;
// Static facade over a single Bugsnag Client instance. Every configuration
// and notify call is routed through runOnClient, which no-ops (with an error
// log) if register(...) has not been called yet, so callers never crash the
// host app by using Bugsnag too early.
public class Bugsnag {
// The singleton client; null until register(...) succeeds.
private static Client client;
private static final String TAG = "Bugsnag";
// Process start reference point, captured at class-load time.
// NOTE(review): not read anywhere in this class — presumably consumed by
// Client or session tracking elsewhere; confirm before removing.
static long startTime = SystemClock.elapsedRealtime();
// Registers with metrics disabled; see the two-arg overload.
public static void register(Context androidContext, String apiKey) {
register(androidContext, apiKey, false);
}
// Creates the shared Client. Any exception during construction is logged
// and swallowed so a bad setup cannot crash the host application.
public static void register(Context androidContext, String apiKey, boolean enableMetrics) {
// Create the bugsnag client
try {
client = new Client(androidContext, apiKey, enableMetrics);
} catch(Exception ex) {
Log.e(TAG, "Unable to register with bugsnag. ", ex);
}
}
// Sets the error-report context from a plain string.
public static void setContext(final String context) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setContext(context);
}
});
}
// Sets the error-report context from the current Activity.
public static void setContext(final Activity context) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setContext(context);
}
});
}
// Associates subsequent reports with the given user id.
public static void setUserId(final String userId) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setUserId(userId);
}
});
}
// Sets the release stage (e.g. "production", "development").
public static void setReleaseStage(final String releaseStage) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setReleaseStage(releaseStage);
}
});
}
// Restricts notification to the given release stages.
public static void setNotifyReleaseStages(final String... notifyReleaseStages) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setNotifyReleaseStages(notifyReleaseStages);
}
});
}
// Enables/disables automatic reporting of uncaught exceptions.
public static void setAutoNotify(final boolean autoNotify) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setAutoNotify(autoNotify);
}
});
}
// Chooses whether reports are sent over SSL.
public static void setUseSSL(final boolean useSSL) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setUseSSL(useSSL);
}
});
}
// Overrides the endpoint reports are delivered to.
public static void setEndpoint(final String endpoint) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setEndpoint(endpoint);
}
});
}
// Exception class names that should never be reported.
public static void setIgnoreClasses(final String... ignoreClasses) {
runOnClient(new Runnable() {
@Override
public void run() {
client.setIgnoreClasses(ignoreClasses);
}
});
}
// Attaches a key/value pair to the named metadata tab on future reports.
public static void addToTab(final String tab, final String key, final Object value) {
runOnClient(new Runnable() {
@Override
public void run() {
client.addToTab(tab, key, value);
}
});
}
// Reports a handled exception with no metadata overrides.
public static void notify(Throwable e) {
notify(e, null);
}
// Reports a handled exception, merging the given metadata overrides.
public static void notify(final Throwable e, final MetaData overrides) {
runOnClient(new Runnable() {
@Override
public void run() {
client.notify(e, overrides);
}
});
}
// Runs the delegate only when a client exists; logs (never throws) on
// failure so Bugsnag itself cannot take down the host app.
private static void runOnClient(Runnable delegate) {
if(client != null) {
try {
delegate.run();
} catch(Exception ex) {
Log.e(TAG, "Error in bugsnag.", ex);
}
} else {
Log.e(TAG, "Method called on Bugsnag before register.");
}
}
}
|
package com.music.tools;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.SourceDataLine;
import org.apache.commons.lang.ArrayUtils;
import jm.JMC;
import com.music.MainPartGenerator.MainPartContext;
import com.music.ScoreContext;
import com.music.util.music.Chance;
import com.music.util.music.ToneResolver;
public class ScaleTester {
private static final int SCALE_SIZE = 7;
private static final boolean USE_ET = true;
private static final int CHROMATIC_SCALE_SILZE = 12;
private static final double FUNDAMENTAL_FREQUENCY = 262.626;
// chromatic-to-scale ratio: (12/7) 1,7142857142857142857142857142857
private static Random random = new Random();
private static int sampleRate = 8000;
private static Map<Double, long[]> fractionCache = new HashMap<>();
private static double fundamentalFreq = 0;
public static void main(String[] args) {
System.out
.println("Usage: java ScaleTester <fundamental frequency> <chromatic scale size> <scale size> <use ET>");
final AudioFormat af = new AudioFormat(sampleRate, 16, 1, true, true);
try {
fundamentalFreq = getArgument(args, 0, FUNDAMENTAL_FREQUENCY, Double.class);
int pitchesInChromaticScale = getArgument(args, 1, CHROMATIC_SCALE_SILZE, Integer.class);
List<Double> harmonicFrequencies = new ArrayList<>();
List<String> ratios = new ArrayList<>();
Set<Double> frequencies = new HashSet<Double>();
frequencies.add(fundamentalFreq);
int octaveMultiplier = 2;
for (int i = 2; i < 100; i++) {
// Exclude the 7th harmonic TODO exclude the 11th as well?
if (i % 7 == 0) {
continue;
}
double actualFreq = fundamentalFreq * i;
double closestTonicRatio = actualFreq / (fundamentalFreq * octaveMultiplier);
if (closestTonicRatio < 1 || closestTonicRatio > 2) {
octaveMultiplier *= 2;
}
double closestTonic = actualFreq - actualFreq % (fundamentalFreq * octaveMultiplier);
double normalizedFreq = fundamentalFreq * (actualFreq / closestTonic);
harmonicFrequencies.add(actualFreq);
frequencies.add(normalizedFreq);
if (frequencies.size() == pitchesInChromaticScale) {
break;
}
}
System.out.println("Harmonic (overtone) frequencies: " + harmonicFrequencies);
System.out.println("Transposed harmonic frequencies: " + frequencies);
List<Double> chromaticScale = new ArrayList<>(frequencies);
Collections.sort(chromaticScale);
// find the "perfect" interval (e.g. perfect fifth)
int perfectIntervalIndex = 0;
int idx = 0;
for (Iterator<Double> it = chromaticScale.iterator(); it.hasNext();) {
Double noteFreq = it.next();
long[] fraction = findCommonFraction(noteFreq / fundamentalFreq);
fractionCache.put(noteFreq, fraction);
if (fraction[0] == 3 && fraction[1] == 2) {
perfectIntervalIndex = idx;
System.out.println("Perfect interval (3/2) idx: " + perfectIntervalIndex);
}
idx++;
ratios.add(Arrays.toString(fraction));
}
System.out.println("Ratios to fundemental frequency: " + ratios);
if (getBooleanArgument(args, 4, USE_ET)) {
chromaticScale = temper(chromaticScale);
}
System.out.println();
System.out.println("Chromatic scale: " + chromaticScale);
Set<Double> scaleSet = new HashSet<Double>();
scaleSet.add(chromaticScale.get(0));
idx = 0;
List<Double> orderedInCircle = new ArrayList<>();
// now go around the circle of perfect intervals and put the notes
// in order
while (orderedInCircle.size() < chromaticScale.size()) {
orderedInCircle.add(chromaticScale.get(idx));
idx += perfectIntervalIndex;
idx = idx % chromaticScale.size();
}
System.out.println("Pitches Ordered in circle of perfect intervals: " + orderedInCircle);
List<Double> scale = new ArrayList<Double>(scaleSet);
int currentIdxInCircle = orderedInCircle.size() - 1; // start with
// the last
// note in the
// circle
int scaleSize = getArgument(args, 3, SCALE_SIZE, Integer.class);
while (scale.size() < scaleSize) {
double pitch = orderedInCircle.get(currentIdxInCircle % orderedInCircle.size());
if (!scale.contains(pitch)) {
scale.add(pitch);
}
currentIdxInCircle++;
}
Collections.sort(scale);
System.out.println("Scale: " + scale);
SourceDataLine line = AudioSystem.getSourceDataLine(af);
line.open(af);
line.start();
Double[] scaleFrequencies = scale.toArray(new Double[scale.size()]);
// first play the whole scale
WaveMelodyGenerator.playScale(line, scaleFrequencies);
// then generate a random melody in the scale
WaveMelodyGenerator.playMelody(line, scaleFrequencies);
line.drain();
line.close();
} catch (Exception e) {
e.printStackTrace();
}
}
private static boolean getBooleanArgument(String[] args, int i, boolean defaultValue) {
if (args.length > i) {
return Boolean.parseBoolean(args[i]);
} else {
return defaultValue;
}
}
private static <T extends Number> T getArgument(String[] args, int i, T defaultValue, Class<T> resultClass) {
if (args.length > i) {
return resultClass.cast(Double.parseDouble(args[i]));
} else {
return defaultValue;
}
}
private static List<Double> temper(List<Double> chromaticScale) {
System.out.println("Before temper: " + chromaticScale);
Double currentNote = chromaticScale.get(0);
List<Double> result = new ArrayList<Double>();
result.add(currentNote);
double ratio = Math.pow(2, 1d / chromaticScale.size());
for (int i = 1; i < chromaticScale.size(); i++) {
currentNote = currentNote * ratio;
currentNote = ((int) (currentNote * 1000)) / 1000d;
result.add(currentNote);
// Fill the fractions cache with the new values:
long[] fraction = findCommonFraction(currentNote / fundamentalFreq);
fractionCache.put(currentNote, fraction);
}
return result;
}
public static long[] findCommonFraction(double decimal) {
long multiplier = 100000000l;
long numerator = (int) (decimal * multiplier);
long denominator = multiplier;
long[] result = simplify(numerator, denominator);
return result;
}
private static long[] simplify(long numerator, long denominator) {
int divisor = 2;
long maxDivisor = Math.min(numerator, denominator) / 2;
while (divisor < maxDivisor) {
if (numerator % divisor == 0 && denominator % divisor == 0) {
numerator = numerator / divisor;
denominator = denominator / divisor;
} else {
divisor++;
}
}
return new long[] { numerator, denominator };
}
/**
* Low-level sound wave handling
*
*/
public static class WavePlayer {
public static void playNotes(SourceDataLine line, double[] frequencies) {
for (int i = 0; i < frequencies.length; i++) {
playNote(line, frequencies[i]);
}
}
public static void playNotes(SourceDataLine line, Double[] frequencies) {
playNotes(line, ArrayUtils.toPrimitive(frequencies));
}
public static void playNote(SourceDataLine line, double frequency) {
play(line, generateSineWavefreq(frequency, 1));
}
private static void play(SourceDataLine line, byte[] array) {
int length = sampleRate * array.length / 1000;
line.write(array, 0, array.length);
}
private static byte[] generateSineWavefreq(double frequencyOfSignal, double seconds) {
byte[] sin = new byte[(int) (seconds * sampleRate)];
double samplingInterval = (double) (sampleRate / frequencyOfSignal);
for (int i = 0; i < sin.length; i++) {
double angle = (2.0 * Math.PI * i) / samplingInterval;
sin[i] = (byte) (Math.sin(angle) * 127);
}
return sin;
}
}
/**
* Simple class that generates and plays a melody in a given scale
*
*/
public static class WaveMelodyGenerator {
private static void playMelody(SourceDataLine line, Double[] scaleFrequencies) {
int position;
MainPartContext lCtx = new MainPartContext();
lCtx.setDirectionUp(true);
ScoreContext ctx = new ScoreContext();
double[] melody = new double[30];
double[] lengths = new double[30];
for (int i = 0; i < 30; i++) {
position = getNextNotePitchIndex(ctx, lCtx, scaleFrequencies);
double freq = scaleFrequencies[position];
double length = getNoteLength(lCtx);
melody[i] = freq;
lengths[i] = length;
}
WavePlayer.playNotes(line, melody);
}
private static void playScale(SourceDataLine line, Double[] scaleFrequencies) {
WavePlayer.playNotes(line, scaleFrequencies);
WavePlayer.playNote(line, scaleFrequencies[0] * 2);
}
/**
* Pieces copied from MainPartGenerator
*/
private static final int[] PROGRESS_TYPE_PERCENTAGES = new int[] { 25, 48, 25, 2 };
private static final int[] NOTE_LENGTH_PERCENTAGES = new int[] { 10, 31, 40, 7, 9, 3 };
public static double getNoteLength(MainPartContext lCtx) {
double length = 0;
int lengthSpec = Chance.choose(NOTE_LENGTH_PERCENTAGES);
// don't allow drastic changes in note length
if (lCtx.getPreviousLength() != 0 && lCtx.getPreviousLength() < 1 && lengthSpec == 5) {
length = 4;
} else if (lCtx.getPreviousLength() != 0 && lCtx.getPreviousLength() >= 2 && lengthSpec == 0) {
lengthSpec = 1;
}
if (lengthSpec == 0
&& (lCtx.getSameLengthNoteSequenceCount() == 0 || lCtx.getSameLengthNoteType() == JMC.SIXTEENTH_NOTE)) {
length = JMC.SIXTEENTH_NOTE;
} else if (lengthSpec == 1
&& (lCtx.getSameLengthNoteSequenceCount() == 0 || lCtx.getSameLengthNoteType() == JMC.EIGHTH_NOTE)) {
length = JMC.EIGHTH_NOTE;
} else if (lengthSpec == 2
&& (lCtx.getSameLengthNoteSequenceCount() == 0 || lCtx.getSameLengthNoteType() == JMC.QUARTER_NOTE)) {
length = JMC.QUARTER_NOTE;
} else if (lengthSpec == 3
&& (lCtx.getSameLengthNoteSequenceCount() == 0 || lCtx.getSameLengthNoteType() == JMC.DOTTED_QUARTER_NOTE)) {
length = JMC.DOTTED_QUARTER_NOTE;
} else if (lengthSpec == 4) {
length = JMC.HALF_NOTE;
} else if (lengthSpec == 5) {
length = JMC.WHOLE_NOTE;
}
// handle sequences of notes with the same length
if (lCtx.getSameLengthNoteSequenceCount() == 0 && Chance.test(17)
&& length <= JMC.DOTTED_QUARTER_NOTE) {
lCtx.setSameLengthNoteSequenceCount(3 + random.nextInt(7));
lCtx.setSameLengthNoteType(length);
}
if (lCtx.getSameLengthNoteSequenceCount() > 0) {
lCtx.setSameLengthNoteSequenceCount(lCtx.getSameLengthNoteSequenceCount() - 1);
}
return length;
}
private static int getNextNotePitchIndex(ScoreContext ctx, MainPartContext lCtx, Double[] frequencies) {
int notePitchIndex;
if (lCtx.getPitches().isEmpty()) {
// avoid excessively high and low notes.
notePitchIndex = 0;
lCtx.getPitchRange()[0] = 0;
lCtx.getPitchRange()[1] = frequencies.length;
} else {
int previousNotePitch = lCtx.getPitches().get(lCtx.getPitches().size() - 1);
boolean shouldResolveToStableTone = shouldResolveToStableTone(lCtx.getPitches(), frequencies);
if (!lCtx.getCurrentChordInMelody().isEmpty()) {
notePitchIndex = lCtx.getCurrentChordInMelody().get(0);
lCtx.getCurrentChordInMelody().remove(0);
} else if (shouldResolveToStableTone) {
notePitchIndex = resolve(previousNotePitch, frequencies);
if (lCtx.getPitches().size() > 1 && notePitchIndex == previousNotePitch) {
// in that case, make a step to break the repetition
// pattern
int pitchChange = getStepPitchChange(frequencies, lCtx.isDirectionUp(),
previousNotePitch);
notePitchIndex = previousNotePitch + pitchChange;
}
} else {
// try getting a pitch. if the pitch range is exceeded, get
// new consonant tone, in the opposite direction, different
// progress type and different interval
int attempt = 0;
// use a separate variable in order to allow change only for
// this particular note, and not for the direction of the
// melody
boolean directionUp = lCtx.isDirectionUp();
do {
int progressType = Chance.choose(PROGRESS_TYPE_PERCENTAGES);
// in some cases change the predefined direction (for
// this pitch only), for a more interesting melody
if ((progressType == 1 || progressType == 2) && Chance.test(15)) {
directionUp = !directionUp;
}
// always follow big jumps with a step back
int needsStepBack = needsStepBack(lCtx.getPitches());
if (needsStepBack != 0) {
progressType = 1;
directionUp = needsStepBack == 1;
}
if (progressType == 1) { // step
int pitchChange = getStepPitchChange(frequencies, directionUp, previousNotePitch);
notePitchIndex = previousNotePitch + pitchChange;
} else if (progressType == 0) { // unison
notePitchIndex = previousNotePitch;
} else { // 2 - intervals
// for a melodic sequence, use only a "jump" of up
// to 6 pitches in current direction
int change = 2 + random.nextInt(frequencies.length - 2);
notePitchIndex = (previousNotePitch + change) % frequencies.length;
}
if (attempt > 0) {
directionUp = !directionUp;
}
// if there are more than 3 failed attempts, simply
// assign a random in-scale, in-range pitch
if (attempt > 3) {
int start = lCtx.getPitchRange()[1] - random.nextInt(6);
for (int i = start; i > lCtx.getPitchRange()[0]; i
if (Arrays.binarySearch(lCtx.getCurrentScale().getDefinition(), i % 12) > -1) {
notePitchIndex = i;
}
}
}
attempt++;
} while (!ToneResolver.isInRange(notePitchIndex, lCtx.getPitchRange()));
}
}
lCtx.getPitches().add(notePitchIndex);
return notePitchIndex;
}
private static int resolve(int previousNotePitch, Double[] frequencies) {
int idx = previousNotePitch + 1;
int step = 1;
while (idx >= 0 && idx < frequencies.length) {
if (fractionCache.get(frequencies[idx])[0] <= 9) {
return idx;
}
if (step > 0) {
step = -step;
} else {
step = -step;
step++;
}
idx += step;
idx = idx % frequencies.length;
}
return 0;
}
private static int needsStepBack(List<Integer> pitches) {
if (pitches.size() < 2) {
return 0;
}
int previous = pitches.get(pitches.size() - 1);
int prePrevious = pitches.get(pitches.size() - 2);
int diff = previous - prePrevious;
if (Math.abs(diff) > 6) {
return (int) -Math.signum(diff); // the opposite direction of
// the previous interval
}
return 0;
}
private static int getStepPitchChange(Double[] frequencies, boolean directionUp, int previousNotePitch) {
int pitchChange = 0;
int[] steps = new int[] { -1, 1 };
if (directionUp) {
steps = new int[] { 1, -1, };
}
for (int i : steps) {
// if the pitch is in the predefined direction and it is within
// the scale - use it.
if (previousNotePitch + i < frequencies.length && previousNotePitch + i > 0) {
pitchChange = i;
}
// in case no other matching tone is found that is common, the
// last appropriate one will be retained in "pitchChange"
}
return pitchChange;
}
private static boolean shouldResolveToStableTone(List<Integer> pitches, Double[] frequencies) {
// if the previous two pitches are unstable
int previousNotePitch = pitches.get(pitches.size() - 1);
int prePreviousNotePitch = 0;
if (pitches.size() >= 2) {
prePreviousNotePitch = pitches.get(pitches.size() - 2);
}
long[] previousRatio = fractionCache.get(frequencies[previousNotePitch]);
long[] prePreviousRatio = fractionCache.get(frequencies[prePreviousNotePitch]);
if (prePreviousNotePitch != 0 && previousRatio[0] > 9 && prePreviousRatio[0] > 9) {
return true;
}
return false;
}
}
}
|
// samskivert library - useful routines for java programs
//
// This library is free software; you can redistribute it and/or modify it
// under the terms of the GNU Lesser General Public License as published
// by the Free Software Foundation; either version 2.1 of the License, or
// (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
package com.samskivert.util;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
/**
* Provides utility routines to simplify obtaining randomized values.
*
* Each instance of Randoms is completely thread safe, but will share an underlying
* {@link Random} object. If you wish to have a private stream of pseudorandom numbers,
* use the {@link #with} factory.
*/
public class Randoms
{
/** A default Randoms that can be safely shared by any caller. */
public static final Randoms RAND = with(new Random());
/**
* A factory to create a new Randoms object.
*/
public static Randoms with (Random rand)
{
return new Randoms(rand);
}
/**
* Get a thread-local Randoms instance that will not contend with any other thread
* for random number generation.
*
* <p><b>Note:</b> while all Randoms instances are thread-safe, normally they use a
* java.util.Random internally that must protect against multiple threads generating
* psuedorandom numbers with it simultaneously. This method will return a Randoms
* that uses an internal Random subclass with no such safeguards, resulting in much
* less overhead. However, you should probably not store a reference to the result,
* but instead always use it immediately as in the following example:
* <pre style="code">
* Puppy pick = Randoms.threadLocal().pick(Puppy.LITTER, null);
* </pre>
*/
public static Randoms threadLocal ()
{
return _localRandoms.get();
}
public int getInt (int high)
{
return _r.nextInt(high);
}
public int getInRange (int low, int high)
{
return low + _r.nextInt(high - low);
}
/**
* Returns a pseudorandom, uniformly distributed float value between 0.0 (inclusive) and the
* specified value (exclusive).
*
* @param high the high value limiting the random number sought.
*/
public float getFloat (float high)
{
return _r.nextFloat() * high;
}
/**
* Returns a pseudorandom, uniformly distributed <code>float</code> value between
* <code>low</code> (inclusive) and <code>high</code> (exclusive).
*/
public float getInRange (float low, float high)
{
return low + (_r.nextFloat() * (high - low));
}
public boolean getChance (int n)
{
return (0 == _r.nextInt(n));
}
/**
* Has a probability p of returning true.
*/
public boolean getProbability (float p)
{
return _r.nextFloat() < p;
}
/**
* Returns true or false with approximately even distribution.
*/
public boolean getBoolean ()
{
return _r.nextBoolean();
}
public <T> T getWeighted (Map<T, Integer> valuesToWeights)
{
// TODO: validation?
int idx = _r.nextInt(Folds.sum(0, valuesToWeights.values()));
for (Map.Entry<T, Integer> entry : valuesToWeights.entrySet()) {
idx -= entry.getValue();
if (idx < 0) {
return entry.getKey();
}
}
throw new AssertionError("Not possible");
}
/**
* Pick a random element from the specified Iterator, or return <code>ifEmpty</code>
* if it is empty.
*
* <p><b>Implementation note:</b> because the total size of the Iterator is not known,
* the random number generator is queried after the second element and every element
* thereafter.
*
* @throws NullPointerException if the iterator is null.
*/
public <T> T pick (Iterator<? extends T> iterator, T ifEmpty)
{
if (!iterator.hasNext()) {
return ifEmpty;
}
T pick = iterator.next();
for (int count = 2; iterator.hasNext(); count++) {
T next = iterator.next();
if (0 == _r.nextInt(count)) {
pick = next;
}
}
return pick;
}
/**
* Pick a random element from the specified Iterable, or return <code>ifEmpty</code>
* if it is empty.
*
* <p><b>Implementation note:</b> optimized implementations are used if the Iterable
* is a List or Collection. Otherwise, it behaves as if calling {@link #pick(Iterator)} with
* the Iterable's Iterator.
*
* @throws NullPointerException if the iterable is null.
*/
public <T> T pick (Iterable<? extends T> iterable, T ifEmpty)
{
return pickPluck(iterable, ifEmpty, false);
}
/**
* Pluck (remove) a random element from the specified Iterable, or return <code>ifEmpty</code>
* if it is empty.
*
* <p><b>Implementation note:</b> optimized implementations are used if the Iterable
* is a List or Collection. Otherwise, two Iterators are created from the Iterable
* and a random number is generated after the second element and all beyond.
*
* @throws NullPointerException if the iterable is null.
* @throws UnsupportedOperationException if the iterable is unmodifiable or its Iterator
* does not support {@link Iterator#remove()}.
*/
public <T> T pluck (Iterable<? extends T> iterable, T ifEmpty)
{
return pickPluck(iterable, ifEmpty, true);
}
/**
* Construct a Randoms.
*/
protected Randoms (Random rand)
{
_r = rand;
}
/**
* Shared code for pick and pluck.
*/
@SuppressWarnings("unchecked")
protected <T> T pickPluck (Iterable<? extends T> iterable, T ifEmpty, boolean remove)
{
if (iterable instanceof Collection) {
// optimized path for Collection
Collection<? extends T> coll = (Collection<? extends T>)iterable;
int size = coll.size();
if (size == 0) {
return ifEmpty;
}
if (coll instanceof List) {
// extra-special optimized path for Lists
List<? extends T> list = (List<? extends T>)coll;
int idx = _r.nextInt(size);
return (T) (remove ? list.remove(idx) : list.get(idx));
}
// for other Collections, we must iterate
Iterator<? extends T> it = coll.iterator();
for (int idx = _r.nextInt(size); idx > 0; idx
it.next();
}
try {
return it.next();
} finally {
if (remove) {
it.remove();
}
}
}
if (!remove) {
return pick(iterable.iterator(), ifEmpty);
}
// from here on out, we're doing a pluck with a complicated two-iterator solution
Iterator<? extends T> it = iterable.iterator();
if (!it.hasNext()) {
return ifEmpty;
}
Iterator<? extends T> lagIt = iterable.iterator();
T pick = it.next();
lagIt.next();
for (int count = 2, lag = 1; it.hasNext(); count++, lag++) {
T next = it.next();
if (0 == _r.nextInt(count)) {
pick = next;
for ( ; lag > 0; lag
lagIt.next();
}
}
}
lagIt.remove();
return pick;
}
/** The random number generator. */
protected final Random _r;
/** A ThreadLocal for accessing a thread-local version of Randoms. */
protected static final ThreadLocal<Randoms> _localRandoms = new ThreadLocal<Randoms>() {
@Override
public Randoms initialValue () {
return with(new ThreadLocalRandom());
}
};
protected static class ThreadLocalRandom extends Random {
// same constants as Random, but must be redeclared because private
private final static long multiplier = 0x5DEECE66DL;
private final static long addend = 0xBL;
private final static long mask = (1L << 48) - 1;
/**
* The random seed. We can't use super.seed.
*/
private long rnd;
/**
* Initialization flag to permit calls to setSeed to succeed only
* while executing the Random constructor. We can't allow others
* since it would cause setting seed in one part of a program to
* unintentionally impact other usages by the thread.
*/
boolean initialized;
// Padding to help avoid memory contention among seed updates in
// different TLRs in the common case that they are located near
// each other.
@SuppressWarnings("unused")
private long pad0, pad1, pad2, pad3, pad4, pad5, pad6, pad7;
/**
* Constructor called only by localRandom.initialValue.
*/
ThreadLocalRandom() {
super();
initialized = true;
}
/**
* Throws {@code UnsupportedOperationException}. Setting seeds in
* this generator is not supported.
*
* @throws UnsupportedOperationException always
*/
@Override
public void setSeed(long seed) {
if (initialized)
throw new UnsupportedOperationException();
rnd = (seed ^ multiplier) & mask;
}
@Override
protected int next(int bits) {
rnd = (rnd * multiplier + addend) & mask;
return (int) (rnd >>> (48-bits));
}
// as of JDK 1.6, this method does not exist in java.util.Random
// public int nextInt(int least, int bound) {
// if (least >= bound)
// return nextInt(bound - least) + least;
public long nextLong(long n) {
if (n <= 0)
throw new IllegalArgumentException("n must be positive");
// Divide n by two until small enough for nextInt. On each
// iteration (at most 31 of them but usually much less),
// randomly choose both whether to include high bit in result
// (offset) and whether to continue with the lower vs upper
// half (which makes a difference only if odd).
long offset = 0;
while (n >= Integer.MAX_VALUE) {
int bits = next(2);
long half = n >>> 1;
long nextn = ((bits & 2) == 0) ? half : n - half;
if ((bits & 1) == 0)
offset += n - nextn;
n = nextn;
}
return offset + nextInt((int) n);
}
public long nextLong(long least, long bound) {
if (least >= bound)
throw new IllegalArgumentException();
return nextLong(bound - least) + least;
}
public double nextDouble(double n) {
if (n <= 0)
throw new IllegalArgumentException("n must be positive");
return nextDouble() * n;
}
public double nextDouble(double least, double bound) {
if (least >= bound)
throw new IllegalArgumentException();
return nextDouble() * (bound - least) + least;
}
private static final long serialVersionUID = -5851777807851030925L;
}
}
|
package com.scaffy.dao;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.commons.lang.WordUtils;
import com.scaffy.controller.MultipartResponse;
import com.scaffy.dao.bean.BeanMethod;
import com.scaffy.dao.bean.BeanTraversalException;
import com.scaffy.dao.bean.BeanVisitor;
import com.scaffy.dao.bean.Node;
public abstract class BasicRESTDao implements RESTDao{
private Map<BeanMethod.Method, BeanVisitor> visitors;
public BasicRESTDao() {
visitors = new HashMap<BeanMethod.Method, BeanVisitor>();
}
protected void traverse(
Object bean,
BeanMethod.Method mainMethod) throws BeanTraversalException {
if(bean.getClass().getAnnotation(Node.class) == null){
visitors.get(mainMethod).visit(bean);
} else if(Collection.class.isAssignableFrom(bean.getClass())){
Collection<?> beanAsCollection = (Collection<?>) bean;
for(Object beanEntry : beanAsCollection)
traverse(beanEntry, mainMethod);
} else {
keepTraversing(bean, mainMethod);
}
}
private void keepTraversing(Object bean, BeanMethod.Method mainMethod)
throws BeanTraversalException {
try {
Map<String, Object> descriptor = PropertyUtils.describe(bean);
descriptor.remove("class");
Set<String> properties = descriptor.keySet();
for(String property : properties){
String methodName = "get" + WordUtils.capitalize(property);
BeanMethod beanMethod = bean.getClass().getMethod(methodName).getAnnotation(BeanMethod.class);
if(beanMethod != null)
traverse(descriptor.get(property), beanMethod.method());
else
traverse(descriptor.get(property), mainMethod);
}
} catch (Exception e) {
throw new BeanTraversalException(e);
}
}
protected void addVisitor(BeanMethod.Method method, BeanVisitor beanVisitor) {
visitors.put(method, beanVisitor);
}
/* (non-Javadoc)
* @see com.scaffy.service.EntityService#create(java.lang.Object)
*/
public void create(Object model) throws BeanTraversalException, DaoOperationException {
execute(model, BeanMethod.Method.POST);
}
public void createWithAttachments(MultipartResponse request)
throws BeanTraversalException, DaoOperationException {
execute(request, BeanMethod.Method.POST);
}
/* (non-Javadoc)
* @see com.scaffy.service.EntityService#update(java.lang.Object)
*/
public void update(Object model) throws BeanTraversalException, DaoOperationException {
execute(model, BeanMethod.Method.PUT);
}
/* (non-Javadoc)
* @see com.scaffy.service.EntityService#delete(java.lang.Object)
*/
public void delete(Object model) throws BeanTraversalException, DaoOperationException {
execute(model, BeanMethod.Method.DELETE);
}
protected abstract void execute(Object model, BeanMethod.Method method)
throws BeanTraversalException, DaoOperationException;
protected abstract void execute(MultipartResponse request, BeanMethod.Method method)
throws BeanTraversalException, DaoOperationException;
}
|
package com.web.AbhinavJava;
class Account {
private int accountId;
private String name;
private double balance;
private transient long ssn;
public void setName(String name) { this.name = name; }
public void setBalance(Double balance) { this.balance = balance; }
public void setAccountId(int accountId){ this.accountId = accountId; }
public void setSsn(long ssn) { this.ssn = ssn; }
public int getAccountId() {
return accountId;
}
public String getName() {
return name;
}
public double getBalance() {
return balance;
}
public long getSsn(){return ssn;}
}
|
package crazypants.enderio;
import java.util.ArrayList;
import javax.annotation.Nullable;
import net.minecraft.block.Block;
import net.minecraft.block.material.MapColor;
import net.minecraft.block.material.Material;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import cpw.mods.fml.common.registry.GameRegistry;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import crazypants.enderio.api.tool.ITool;
import crazypants.enderio.machine.AbstractMachineEntity;
import crazypants.enderio.tool.ToolUtil;
/**
 * Base class for EnderIO blocks. Wires up block name, material, tile entity
 * registration, icon registration, and drop handling that routes drops through
 * the associated {@code TileEntityEio} (so machines can keep their contents).
 */
public abstract class BlockEio extends Block {

  /** Tile entity class registered for this block, or null for plain blocks. */
  protected final Class<? extends TileEntity> teClass;
  /** Registry/unlocalized name; also used as the icon name. */
  protected final String name;

  protected BlockEio(String name, Class<? extends TileEntity> teClass) {
    // default material: iron-colored generic material
    this(name, teClass, new Material(MapColor.ironColor));
  }

  protected BlockEio(String name, Class<? extends TileEntity> teClass, Material mat) {
    super(mat);
    this.teClass = teClass;
    this.name = name;
    setHardness(0.5F);
    setBlockName(name);
    setStepSound(Block.soundTypeMetal);
    setHarvestLevel("pickaxe", 0);
    setCreativeTab(EnderIOTab.tabEnderIO);
  }

  /** Registers the block (and its tile entity, when present) with Forge. */
  protected void init() {
    GameRegistry.registerBlock(this, name);
    if(teClass != null) {
      GameRegistry.registerTileEntity(teClass, name + "TileEntity");
    }
  }

  @Override
  public boolean hasTileEntity(int metadata) {
    return teClass != null;
  }

  @Override
  public TileEntity createTileEntity(World world, int metadata) {
    if(teClass != null) {
      try {
        // requires a public no-arg constructor on the tile entity class
        return teClass.newInstance();
      } catch (Exception e) {
        Log.error("Could not create tile entity for block " + name + " for class " + teClass);
      }
    }
    return null;
  }

  @Override
  @SideOnly(Side.CLIENT)
  public void registerBlockIcons(IIconRegister iIconRegister) {
    blockIcon = iIconRegister.registerIcon("enderio:" + name);
  }

  @Override
  public ArrayList<ItemStack> getDrops(World world, int x, int y, int z, int metadata, int fortune) {
    // blocks that manage their own drops (via dropAsItem) return nothing here
    return doNormalDrops(world, x, y, z) ? super.getDrops(world, x, y, z, metadata, fortune) : new ArrayList<ItemStack>();
  }

  /* Subclass Helpers */

  @Override
  public boolean onBlockActivated(World world, int x, int y, int z, EntityPlayer entityPlayer, int side, float par7, float par8, float par9) {
    // wrench (tool) interaction: break the block with the tool if permitted
    if(shouldWrench(world, x, y, z, entityPlayer, side) && ToolUtil.breakBlockWithTool(this, world, x, y, z, entityPlayer)) {
      return true;
    }
    // tool + not sneaking: toggle the machine's IO mode on the clicked face
    ITool tool = ToolUtil.getEquippedTool(entityPlayer);
    if(tool != null && !entityPlayer.isSneaking()) {
      TileEntity te = world.getTileEntity(x, y, z);
      if(te instanceof AbstractMachineEntity) {
        ((AbstractMachineEntity) te).toggleIoModeForFace(ForgeDirection.getOrientation(side));
        world.markBlockForUpdate(x, y, z);
        return true;
      }
    }
    if(entityPlayer.isSneaking()) {
      return false;
    }
    return openGui(world, x, y, z, entityPlayer, side);
  }

  /** Hook: whether a wrench click should dismantle this block. Default: yes. */
  protected boolean shouldWrench(World world, int x, int y, int z, EntityPlayer entityPlayer, int side) {
    return true;
  }

  /** Hook: open this block's GUI on activation. Default: no GUI. */
  protected boolean openGui(World world, int x, int y, int z, EntityPlayer entityPlayer, int side) {
    return false;
  }

  /** Hook: true when vanilla drop handling applies; false for tile-entity-managed drops. */
  public boolean doNormalDrops(World world, int x, int y, int z) {
    return true;
  }

  @Override
  public boolean removedByPlayer(World world, EntityPlayer player, int x, int y, int z, boolean doHarvest) {
    TileEntity te = world.getTileEntity(x, y, z);
    if(te instanceof TileEntityEio && ((TileEntityEio) te).shouldDrop()) {
      if(!world.isRemote && !player.capabilities.isCreativeMode && !doNormalDrops(world, x, y, z)) {
        // survival, server side: emit the custom tile-entity-backed drop once
        dropAsItem(world, x, y, z, (TileEntityEio) te);
        ((TileEntityEio) te).preventDrops();
      } else if(player.capabilities.isCreativeMode) {
        ((TileEntityEio) te).preventDrops();
      }
    }
    // NOTE(review): doHarvest is hard-coded to false here rather than forwarded —
    // presumably to suppress vanilla harvesting since drops were handled above; confirm.
    return super.removedByPlayer(world, player, x, y, z, false);
  }

  @Override
  public void breakBlock(World world, int x, int y, int z, Block block, int meta) {
    // covers non-player breaks (explosions etc.): emit the custom drop if still pending
    TileEntity te = world.getTileEntity(x, y, z);
    if(te instanceof TileEntityEio && ((TileEntityEio) te).shouldDrop() && !doNormalDrops(world, x, y, z)) {
      dropAsItem(world, x, y, z, (TileEntityEio) te);
      ((TileEntityEio)te).preventDrops();
    }
    super.breakBlock(world, x, y, z, block, meta);
  }

  /** Builds the ItemStack for this block (preserving metadata) and drops it in-world. */
  public void dropAsItem(World world, int x, int y, int z, TileEntityEio te) {
    int meta = damageDropped(te.getBlockMetadata());
    ItemStack itemStack = new ItemStack(this, 1, meta);
    processDrop(world, x, y, z, te, itemStack);
    dropBlockAsItem(world, x, y, z, itemStack);
  }

  /** Hook: lets subclasses attach tile-entity data (e.g. NBT) to the dropped stack. */
  protected void processDrop(World world, int x, int y, int z, @Nullable TileEntityEio te, ItemStack drop) {
  }
}
|
package de.cronn.proxy.ssh;
import java.io.Closeable;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.jcraft.jsch.JSchException;
import com.jcraft.jsch.Session;
import de.cronn.proxy.ssh.util.Assert;
public class SshProxy implements Closeable {

    private static final Logger log = LoggerFactory.getLogger(SshProxy.class);

    public static final String LOCALHOST = "localhost";

    private static final int DEFAULT_TIMEOUT_MILLIS = 10_000;

    // Sessions in the order they were opened; close() tears them down in reverse (LIFO) order.
    private final Deque<Session> sshSessions = new ArrayDeque<>();

    // Forwarded local ports per session so close() can delete each forwarding explicitly.
    private final Map<Session, Set<Integer>> portForwardings = new LinkedHashMap<>();

    private final SshConfiguration sshConfiguration;

    private final int timeoutMillis;

    public SshProxy() {
        this(DEFAULT_TIMEOUT_MILLIS);
    }

    /**
     * @param timeoutMillis timeout used for both session connect and socket reads
     * @throws SshProxyRuntimeException if the SSH configuration cannot be loaded
     */
    public SshProxy(int timeoutMillis) {
        try {
            sshConfiguration = SshConfiguration.getConfiguration();
        } catch (Exception e) {
            throw new SshProxyRuntimeException("Failed to open SSH proxy", e);
        }
        this.timeoutMillis = timeoutMillis;
    }

    /**
     * Opens an SSH tunnel to {@code host:port} via {@code sshTunnelHost},
     * letting the OS pick a free local port.
     *
     * @return the local port that now forwards to {@code host:port}
     */
    public int connect(String sshTunnelHost, String host, int port) {
        return connect(sshTunnelHost, host, port, 0);
    }

    /**
     * Opens an SSH tunnel to {@code host:port} via {@code sshTunnelHost}.
     * Jump hosts configured for {@code sshTunnelHost} are connected recursively.
     *
     * @param localPort local port to bind, or 0 to pick any free port
     * @return the local port that now forwards to {@code host:port}
     * @throws SshProxyRuntimeException if the tunnel cannot be established
     */
    public int connect(String sshTunnelHost, String host, int port, int localPort) {
        Assert.notNull(sshTunnelHost, "sshTunnelHost must not be null");
        Assert.notNull(host, "host must not be null");
        Assert.isTrue(port > 0, "illegal port: " + port);
        Assert.isTrue(localPort >= 0, "illegal local port: " + localPort);
        log.debug("tunneling to {}:{} via {}", host, port, sshTunnelHost);
        try {
            sshConfiguration.addIdentity(sshTunnelHost);
            SshProxyConfig proxyConfig = sshConfiguration.getProxyConfiguration(sshTunnelHost);
            if (proxyConfig == null) {
                return directConnect(sshTunnelHost, host, port, localPort);
            }
            // Recursively establish the tunnel through the configured jump host first.
            int jumpPort = connect(proxyConfig);
            String hostUser = sshConfiguration.getHostUser(sshTunnelHost);
            String jumpHost = proxyConfig.getJumpHost();
            Session jumpHostSession = sshConfiguration.openSession(hostUser, jumpHost, jumpPort);
            String hostname = sshConfiguration.getHostName(sshTunnelHost);
            // The session physically connects to localhost:jumpPort, so pin the
            // host-key lookup to the real host name.
            jumpHostSession.setHostKeyAlias(hostname);
            sshSessions.push(jumpHostSession);
            jumpHostSession.setTimeout(timeoutMillis);
            jumpHostSession.connect(timeoutMillis);
            log.debug("[{}] connected via {}@localhost:{}", sshTunnelHost, hostUser, jumpPort);
            return addLocalPortForwarding(sshTunnelHost, jumpHostSession, host, port, localPort);
        } catch (Exception e) {
            throw new SshProxyRuntimeException("Failed to create SSH tunnel to " + host + " via " + sshTunnelHost, e);
        }
    }

    private int connect(SshProxyConfig proxyConfig) {
        String jumpHost = proxyConfig.getJumpHost();
        String forwardingHost = proxyConfig.getForwardingHost();
        int forwardingPort = proxyConfig.getForwardingPort();
        return connect(jumpHost, forwardingHost, forwardingPort);
    }

    private int directConnect(String jumpHost, String targetHost, int targetPort, int localPort) throws JSchException {
        Session jumpHostSession = sshConfiguration.openSession(jumpHost);
        // Track the session before connecting so close() also disconnects half-open sessions.
        sshSessions.add(jumpHostSession);
        jumpHostSession.setTimeout(timeoutMillis);
        try {
            jumpHostSession.connect(timeoutMillis);
        } catch (JSchException e) {
            log.debug("Failed to connect to {} via {}", targetHost, jumpHost, e);
            // Preserve the original exception as the cause so callers see the real failure.
            throw new SshProxyRuntimeException("Failed to connect to " + targetHost + " via " + jumpHost, e);
        }
        log.debug("[{}] connected", jumpHost);
        return addLocalPortForwarding(jumpHost, jumpHostSession, targetHost, targetPort, localPort);
    }

    private int addLocalPortForwarding(String sshTunnelHost, Session session, String targetHost, int targetPort, int localPort) throws JSchException {
        int localPortReturned = session.setPortForwardingL(localPort, targetHost, targetPort);
        log.debug("[{}] local port {} forwarded to {}:{}", sshTunnelHost, localPortReturned, targetHost, targetPort);
        Set<Integer> ports = portForwardings.computeIfAbsent(session, k -> new LinkedHashSet<>());
        ports.add(Integer.valueOf(localPortReturned));
        return localPortReturned;
    }

    /**
     * Deletes all port forwardings and disconnects all sessions, most recent first.
     * Disconnect failures are logged, not rethrown, so every session gets a chance
     * to close.
     */
    @Override
    public void close() {
        if (!sshSessions.isEmpty()) {
            log.debug("closing SSH sessions");
        }
        while (!sshSessions.isEmpty()) {
            Session session = sshSessions.pop();
            deletePortForwarding(session);
            try {
                session.disconnect();
            } catch (Exception e) {
                log.error("Failed to disconnect SSH session", e);
            }
        }
        Assert.isTrue(portForwardings.isEmpty(), "port forwardings must be empty at this point");
    }

    private void deletePortForwarding(Session session) {
        Set<Integer> ports = portForwardings.remove(session);
        if (ports != null) {
            for (Integer localPort : ports) {
                try {
                    String host = session.getHost();
                    if (host.equals(LOCALHOST)) {
                        host = session.getHostKeyAlias();
                    }
                    session.delPortForwardingL(LOCALHOST, localPort.intValue());
                    log.debug("deleted local port forwarding on port {} for {}", localPort, host);
                } catch (Exception e) {
                    log.error("failed to delete port forwarding of port {}", localPort, e);
                }
            }
        }
    }
}
|
package de.dhbw.vetaraus;
import norsys.netica.*;
import org.apache.commons.lang3.StringUtils;
import java.io.*;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
public class NetFactory {

    private NetFactory() {
        // utility class, not instantiable
    }

    /**
     * Create a new Netica net from an existing CSV file. Cases are learned through gradient descent learning algorithm.
     *
     * @param path
     *         Filepath of the CSV file
     * @return A Netica net.
     * @throws NeticaException
     *         Netica problems.
     * @throws IOException
     *         I/O problems.
     */
    public static Net fromCases(String path) throws NeticaException, IOException {
        List<Case> cases = SanitizeUtils.sanitizeCases(CSV.parse(path));

        // Collect the distinct states observed in the data for each variable node.
        Set<String> ageGroupSet = new TreeSet<>();
        Set<String> degreeSet = new TreeSet<>();
        Set<String> occupationSet = new TreeSet<>();
        Set<String> incomeSet = new TreeSet<>();
        Set<String> tariffSet = new TreeSet<>();
        for (Case c : cases) {
            ageGroupSet.add(c.getAge());
            degreeSet.add(c.getDegree());
            occupationSet.add(c.getOccupation());
            incomeSet.add(c.getIncome());
            tariffSet.add(c.getTariff());
        }

        Net net = new Net(Application.getEnvironment());
        Caseset caseset = getCaseset(cases);

        // Create nodes in net; states are the comma-joined observed values.
        NodeList nodeList = new NodeList(net);
        Node ageGroupNode = new Node(Constants.NODE_AGE, StringUtils.join(ageGroupSet, ','), net);
        Node genderNode = new Node(Constants.NODE_GENDER, "m,w", net);
        Node marriedNode = new Node(Constants.NODE_MARRIED, "ja,nein", net);
        Node childCountNode = new Node(Constants.NODE_CHILDCOUNT, "_0,_1,_2,_3,_4", net);
        Node degreeNode = new Node(Constants.NODE_DEGREE, StringUtils.join(degreeSet, ','), net);
        Node occupationNode = new Node(Constants.NODE_OCCUPATION, StringUtils.join(occupationSet, ','), net);
        Node incomeNode = new Node(Constants.NODE_INCOME, StringUtils.join(incomeSet, ','), net);
        Node tariffNode = new Node(Constants.NODE_INSURANCE, StringUtils.join(tariffSet, ','), net);

        // Link nodes: the tariff node depends on every other variable.
        tariffNode.addLink(ageGroupNode);
        tariffNode.addLink(genderNode);
        tariffNode.addLink(marriedNode);
        tariffNode.addLink(childCountNode);
        tariffNode.addLink(degreeNode);
        tariffNode.addLink(occupationNode);
        tariffNode.addLink(incomeNode);

        nodeList.add(ageGroupNode);
        nodeList.add(genderNode);
        nodeList.add(marriedNode);
        nodeList.add(childCountNode);
        nodeList.add(degreeNode);
        nodeList.add(occupationNode);
        nodeList.add(incomeNode);
        nodeList.add(tariffNode);

        Learner learner = new Learner(Learner.GRADIENT_DESCENT_LEARNING);
        learner.learnCPTs(nodeList, caseset, 1.0);
        return net;
    }

    /**
     * Serializes the cases to an in-memory CSV stream and wraps them in a Netica
     * {@link Caseset}.
     */
    private static Caseset getCaseset(List<Case> cases) throws IOException, NeticaException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        PrintWriter writer = new PrintWriter(out);
        CSV.write(cases, writer);
        // PrintWriter buffers: flush before reading the bytes, otherwise the
        // serialized case data may be truncated.
        writer.flush();
        InputStream in = new ByteArrayInputStream(out.toByteArray());
        Caseset caseset = new Caseset();
        caseset.addCases(new Streamer(in, "Cases", Application.getEnvironment()), 1.0, null);
        return caseset;
    }

    /**
     * Load an existing Netica net from the given file and compile it.
     *
     * @param path path of the stored net
     * @return the compiled Netica net
     * @throws NeticaException Netica problems.
     */
    public static Net fromExisting(String path) throws NeticaException {
        // Called for its side effect of initializing the shared Netica environment
        // before the net is read (presumably required by new Net(...) — confirm);
        // the returned Environ itself is not needed here.
        Application.getEnvironment();
        Net net = new Net(new Streamer(path));
        net.compile();
        return net;
    }
}
|
package de.lessvoid.console;
import java.util.LinkedList;
import org.lwjgl.opengl.GL11;
import de.lessvoid.font.Font;
import de.lessvoid.nifty.render.spi.RenderDevice;
public class Console
{
    private Font font;
    private int x;
    private int y;
    private LinkedList<String> data = new LinkedList<String>();
    private boolean left;
    private int maxLines;
    private RenderDevice device;

    /**
     * Creates a console rendered with the given device.
     *
     * @param device   render device used for sizing and output
     * @param maxLines maximum number of lines kept in the scroll-back buffer
     * @param left     true to anchor the console at the left screen edge, false for the right
     */
    public Console(RenderDevice device, int maxLines, boolean left)
    {
        this.device = device;
        font = new Font(device);
        font.init("console.fnt");
        this.left = left;
        this.maxLines = maxLines;
    }

    /** Discards all buffered lines. */
    public void clear()
    {
        data.clear();
    }

    /**
     * Appends text to the console; embedded '\n' characters split it into
     * multiple buffered lines.
     *
     * @param newLine line (or lines) to add
     */
    public void output(final String newLine) {
        for (final String line : newLine.split("\n")) {
            addSingleLine(line);
        }
    }

    /** Adds one line, evicting the oldest line once the buffer is over capacity. */
    private void addSingleLine(final String newLine) {
        data.add(newLine);
        if (maxLinesReached()) {
            data.removeFirst();
        }
    }

    /**
     * returns true when the max lines limit has been reached.
     * @return more than max lines
     */
    boolean maxLinesReached() {
        return data.size() > maxLines;
    }

    /**
     * Renders the whole buffer: first measures the bounding box of all lines,
     * then draws each line inside it.
     */
    public void update()
    {
        int widest = 0;
        int totalHeight = 0;
        for (final String line : data) {
            totalHeight += font.getHeight();
            final int lineWidth = font.getStringWidth(line);
            if (lineWidth > widest) {
                widest = lineWidth;
            }
        }
        beginRender(widest, totalHeight);
        for (final String line : data) {
            outputString(line);
        }
        endRender();
    }

    /** Sets up blending and positions the text cursor for this frame. */
    private void beginRender(final int maxWidth, final int maxHeight)
    {
        GL11.glPushAttrib(GL11.GL_CURRENT_BIT);
        GL11.glEnable(GL11.GL_BLEND);
        GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
        y = device.getHeight() - maxHeight - 10;
        x = left ? 10 : device.getWidth() - maxWidth - 10;
    }

    /** Restores the GL state saved in beginRender(). */
    private void endRender()
    {
        GL11.glPopAttrib();
    }

    /** Draws one line in white and advances the cursor to the next row. */
    private void outputString(final String text)
    {
        GL11.glColor3f(1.0f, 1.0f, 1.0f);
        font.drawString(x, y, text);
        y += font.getHeight();
    }
}
|
package dta.commerce.dao;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import dta.commerce.persistance.Produit;
import dta.commerce.persistance.User;
public class ProduitDAO implements IProduitDAO {

    @PersistenceContext(unitName = "my_pu")
    private EntityManager em;

    /**
     * Looks up a product by its id.
     *
     * @return the product, or null if none exists with this id
     */
    @Override
    public Produit getProduit(int idProduit) {
        return em.find(Produit.class, idProduit);
    }

    /** Persists a new product. */
    @Override
    public void addProduit(Produit produit) {
        em.persist(produit);
    }

    /**
     * Soft-deletes a product: the row is kept but flagged inactive rather than
     * removed from the database.
     *
     * @throws NullPointerException if no product exists with this id
     */
    @Override
    public void deleteProduit(int idProduit) {
        Produit p = getProduit(idProduit);
        if (p == null) {
            // Same failure type as before (NPE on the missing entity), but with a
            // diagnosable message instead of a bare dereference.
            throw new NullPointerException("No Produit found for id " + idProduit);
        }
        p.setActif(false);
        em.merge(p);
    }

    /** Merges changes of an existing product. */
    @Override
    public void updateProduit(Produit produit) {
        em.merge(produit);
    }

    /** @return all products returned by the named query {@code Produit.findAll} */
    @Override
    public List<Produit> listerProduits() {
        // Typed query avoids the unchecked conversion of the raw getResultList().
        return em.createNamedQuery("Produit.findAll", Produit.class).getResultList();
    }
}
|
package imcode.util;
import com.imcode.imcms.domain.dto.ImageData;
import com.imcode.imcms.domain.dto.ImageData.CropRegion;
import com.imcode.imcms.domain.dto.ImageData.RotateDirection;
import com.imcode.imcms.domain.dto.ImageFileDTO;
import com.imcode.imcms.mapping.DocumentMapper;
import com.imcode.imcms.persistence.entity.Image;
import com.imcode.imcms.persistence.entity.ImageCropRegion;
import com.imcode.imcms.servlet.ImcmsSetupFilter;
import imcode.server.Imcms;
import imcode.server.ImcmsServices;
import imcode.server.document.DocumentDomainObject;
import imcode.server.document.FileDocumentDomainObject;
import imcode.server.document.textdocument.*;
import imcode.util.image.Filter;
import imcode.util.image.Format;
import imcode.util.image.ImageOp;
import imcode.util.image.Resize;
import imcode.util.io.FileUtility;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.FileImageInputStream;
import javax.imageio.stream.ImageInputStream;
import java.awt.*;
import java.io.*;
import java.util.Date;
import java.util.Iterator;
import java.util.Objects;
import java.util.UUID;
@Component
public class ImcmsImageUtils {

    private static final Log log = LogFactory.getLog(ImcmsImageUtils.class);
    // Maximum length of a generated file name, including the UUID suffix.
    private static final int GEN_FILE_LENGTH = 255;

    // Static mirrors of the injected instance fields, populated in init().
    public static File imagesPath;
    public static String imageMagickPath;
    private static String imagesUrl;

    @Value("${ImagePath}")
    private File imgPath;
    @Value("${ImageUrl}")
    private String imgUrl;
    @Value("${ImageMagickPath}")
    private String imgMagickPath;

    /**
     * Builds a unique file name for the generated image: source name (truncated
     * so the result fits in {@link #GEN_FILE_LENGTH}) + random UUID + format extension.
     */
    public static String generateImageFileName(ImageData imageData) {
        String suffix = "_" + UUID.randomUUID().toString();
        Format fmt = imageData.getFormat();
        if (fmt != null) {
            suffix += "." + fmt.getExtension();
        }
        final int maxLength = GEN_FILE_LENGTH - suffix.length();
        String filename = imageData.getSource().getNameWithoutExt();
        if (filename.length() > maxLength) {
            filename = filename.substring(0, maxLength);
        }
        filename = Utility.normalizeString(filename);
        return filename + suffix;
    }

    /**
     * Maps a file on disk to an {@link ImageFileDTO}: name, format, path relative
     * to the images root, upload date, human-readable size and pixel dimensions.
     */
    public static ImageFileDTO fileToImageFileDTO(File imageFile) {
        final ImageFileDTO imageFileDTO = new ImageFileDTO();
        final String fileName = imageFile.getName();

        imageFileDTO.setName(fileName);
        imageFileDTO.setFormat(Format.findFormat(FilenameUtils.getExtension(fileName)));

        final String relativePath = imageFile.getPath().replace(imagesPath.getPath(), "");
        imageFileDTO.setPath(relativePath);

        final Date lastModifiedDate = new Date(imageFile.lastModified());
        final String formattedDate = DateConstants.DATETIME_DOC_FORMAT.format(lastModifiedDate);
        imageFileDTO.setUploaded(formattedDate);

        // Scale the byte count to B / kB / MB for display.
        long fileSize = imageFile.length();
        String suffix;
        if (fileSize >= (1024L * 1024L)) {
            suffix = "MB";
            fileSize /= 1024L * 1024L;
        } else if (fileSize >= 1024L) {
            suffix = "kB";
            fileSize /= 1024L;
        } else {
            suffix = "B";
        }
        imageFileDTO.setSize(String.valueOf(fileSize) + suffix);

        final Dimension imageDimension = getImageDimension(imageFile);
        if (imageDimension != null) {
            imageFileDTO.setWidth(imageDimension.width);
            imageFileDTO.setHeight(imageDimension.height);
            imageFileDTO.setResolution(String.valueOf(imageDimension.width) + "x" + imageDimension.height);
        }
        return imageFileDTO;
    }

    /**
     * Reads the pixel dimensions from the image header without decoding the
     * whole image.
     *
     * @return the dimensions, or null if no installed reader could read the file
     */
    private static Dimension getImageDimension(File imgFile) {
        final String suffix = FilenameUtils.getExtension(imgFile.getName());
        final Iterator<ImageReader> iter = ImageIO.getImageReadersBySuffix(suffix);
        while (iter.hasNext()) {
            final ImageReader reader = iter.next();
            // try-with-resources: the previous version leaked the input stream.
            try (ImageInputStream stream = new FileImageInputStream(imgFile)) {
                reader.setInput(stream);
                final int width = reader.getWidth(reader.getMinIndex());
                final int height = reader.getHeight(reader.getMinIndex());
                return new Dimension(width, height);
            } catch (IOException e) {
                log.warn("Error reading: " + imgFile.getAbsolutePath(), e);
            } finally {
                reader.dispose();
            }
        }
        return null;
    }

    /**
     * Wraps a path (relative to the images directory) in an {@link ImageSource};
     * blank paths yield a {@link NullImageSource}.
     */
    public static ImageSource getImageSource(String imagePath) {
        ImageSource imageSource = new NullImageSource();

        if (imagePath.startsWith("/")) {
            imagePath = imagePath.substring(1);
        }
        if (StringUtils.isNotBlank(imagePath)) {
            imageSource = new ImagesPathRelativePathImageSource(imagePath);
        }
        return imageSource;
    }

    /**
     * Resolves an image URL to an {@link ImageSource}: a file document, an
     * archive image, or a path relative to the images directory (in that order
     * of precedence).
     */
    public static ImageSource createImageSourceFromString(String imageUrl) {
        ImageSource imageSource = new NullImageSource();

        if (StringUtils.isNotBlank(imageUrl)) {
            ImcmsServices services = Imcms.getServices();
            DocumentMapper documentMapper = services.getDocumentMapper();
            String documentIdString = ImcmsSetupFilter.getDocumentIdString(services, imageUrl);
            DocumentDomainObject document = documentMapper.getDocument(documentIdString);

            if (document instanceof FileDocumentDomainObject) {
                imageSource = new FileDocumentImageSource(documentMapper.getDocumentReference(document));
            } else {
                String imageArchiveImagesUrl = ImageArchiveImageSource.getImagesUrlPath();
                String imagesPath = ImagesPathRelativePathImageSource.getImagesUrlPath();

                if (imageUrl.startsWith(imageArchiveImagesUrl)) {
                    imageUrl = imageUrl.substring(imageArchiveImagesUrl.length());
                    if (StringUtils.isNotBlank(imageUrl)) {
                        imageSource = new ImageArchiveImageSource(imageUrl);
                    }
                } else {
                    if (imageUrl.startsWith(imagesPath)) {
                        imageUrl = imageUrl.substring(imagesPath.length());
                    }
                    if (StringUtils.isNotBlank(imageUrl)) {
                        imageSource = new ImagesPathRelativePathImageSource(imageUrl);
                    }
                }
            }
        }
        return imageSource;
    }

    /**
     * Generates the processed (cropped/rotated/resized) variant of an image
     * under {@code imagesPath/generated}. Skips work if the target exists and
     * {@code overwrite} is false. Failures are logged, not thrown.
     */
    public static void generateImage(ImageData image, boolean overwrite) {
        File genFile = new File(imagesPath, "generated/" + image.getGeneratedFilename());
        if (!overwrite && genFile.exists()) {
            return;
        }
        ImageSource source = image.getSource();
        if (source instanceof NullImageSource) {
            return;
        }

        InputStream input = null;
        OutputStream output = null;
        File tempFile = null;
        try {
            // Path-traversal guard: refuse to write outside the images directory.
            String imagePathCanon = imagesPath.getCanonicalPath();
            String genFileCanon = genFile.getCanonicalPath();
            if (!genFileCanon.startsWith(imagePathCanon)) {
                return;
            }
            File parentFile = genFile.getParentFile();
            if (!parentFile.exists()) {
                // mkdirs (was mkdir): create any missing intermediate directories too.
                parentFile.mkdirs();
            }
            // Copy the source bytes to a temp file so ImageMagick can read them.
            tempFile = File.createTempFile("genimg", null);
            input = source.getInputStreamSource().getInputStream();
            output = new BufferedOutputStream(new FileOutputStream(tempFile));
            IOUtils.copy(input, output);
            IOUtils.closeQuietly(output);
            generateImage(tempFile, genFile, image.getFormat(), image.getWidth(), image.getHeight(), image.getResize(),
                    image.getCropRegion(), image.getRotateDirection());
        } catch (Exception ex) {
            log.warn(ex.getMessage(), ex);
        } finally {
            IOUtils.closeQuietly(input);
            IOUtils.closeQuietly(output);
            if (tempFile != null) {
                try {
                    FileUtility.forceDelete(tempFile);
                } catch (IOException e) {
                    log.error("Can't delete file " + tempFile, e);
                }
            }
        }
    }

    /**
     * Runs the actual ImageMagick pipeline: rotate, crop, resize (with Lanczos
     * filtering), convert format, and write to {@code destFile}.
     */
    private static void generateImage(File imageFile, File destFile, Format format, int width, int height,
                                      Resize resize, CropRegion cropRegion, RotateDirection rotateDir) {
        ImageOp operation = new ImageOp(imageMagickPath).input(imageFile);

        if (rotateDir != RotateDirection.NORTH) {
            operation.rotate(rotateDir.getAngle());
        }
        if (cropRegion.isValid()) {
            int cropWidth = cropRegion.getWidth();
            int cropHeight = cropRegion.getHeight();
            operation.crop(cropRegion.getCropX1(), cropRegion.getCropY1(), cropWidth, cropHeight);
        }
        if (width > 0 || height > 0) {
            Integer w = (width > 0 ? width : null);
            Integer h = (height > 0 ? height : null);
            if (resize == null) {
                // Force exact dimensions only when both were requested.
                resize = (width > 0 && height > 0 ? Resize.FORCE : Resize.DEFAULT);
            }
            operation.filter(Filter.LANCZOS);
            operation.resize(w, h, resize);
        }
        if (format != null) {
            operation.outputFormat(format);
        }
        operation.processToFile(destFile);
    }

    /**
     * Converts a JPA {@link Image} entity to the legacy {@link ImageDomainObject},
     * including its crop region and image source.
     */
    public static ImageDomainObject toDomainObject(Image image) {
        if (image == null) return null;

        ImageDomainObject imageDO = new ImageDomainObject();
        imageDO.setAlign(image.getAlign());
        imageDO.setAlternateText(image.getAlternateText());
        imageDO.setArchiveImageId(image.getArchiveImageId());
        imageDO.setBorder(image.getBorder());

        ImageCropRegion cropRegion = image.getCropRegion();
        ImageDomainObject.CropRegion cropRegionDO = new ImageDomainObject.CropRegion(
                cropRegion.getCropX1(), cropRegion.getCropY1(), cropRegion.getCropX2(), cropRegion.getCropY2()
        );
        imageDO.setCropRegion(cropRegionDO);

        imageDO.setGeneratedFilename(image.getGeneratedFilename());
        imageDO.setHeight(image.getHeight());
        imageDO.setHorizontalSpace(image.getHorizontalSpace());
        imageDO.setLinkUrl(image.getLinkUrl());
        imageDO.setLowResolutionUrl(image.getLowResolutionUrl());
        imageDO.setName(image.getName());
        imageDO.setResize(Resize.getByOrdinal(image.getResize()));
        imageDO.setTarget(image.getTarget());
        imageDO.setVerticalSpace(image.getVerticalSpace());
        imageDO.setWidth(image.getWidth());
        return initImageSource(image, imageDO);
    }

    /** Sets the image source on the domain object from the entity's url/type pair. */
    private static ImageDomainObject initImageSource(Image jpaImage, ImageDomainObject imageDO) {
        String url = jpaImage.getUrl();
        Integer type = jpaImage.getType();
        Objects.requireNonNull(url);
        Objects.requireNonNull(type);
        imageDO.setSource(createImageSource(imageDO, url.trim(), type));
        return imageDO;
    }

    /**
     * Maps a stored source-type code to a concrete {@link ImageSource};
     * unknown types fall back to {@link NullImageSource}.
     */
    private static ImageSource createImageSource(ImageDomainObject image, String url, int type) {
        switch (type) {
            case ImageSource.IMAGE_TYPE_ID__FILE_DOCUMENT:
                throw new IllegalStateException(
                        String.format("Illegal image source type - IMAGE_TYPE_ID__FILE_DOCUMENT. Image: %s", image)
                );
            case ImageSource.IMAGE_TYPE_ID__IMAGES_PATH_RELATIVE_PATH:
                return new ImagesPathRelativePathImageSource(url);
            case ImageSource.IMAGE_TYPE_ID__IMAGE_ARCHIVE:
                return new ImageArchiveImageSource(url);
            default:
                return new NullImageSource();
        }
    }

    /** Copies the injected configuration values into the static mirrors. */
    @PostConstruct
    public void init() {
        ImcmsImageUtils.imagesPath = imgPath;
        ImcmsImageUtils.imagesUrl = imgUrl;
        ImcmsImageUtils.imageMagickPath = imgMagickPath;
    }
}
|
package info.bowkett.ddt;
import org.mockito.stubbing.Answer;
import java.sql.*;
import java.util.Calendar;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.*;
import static org.mockito.Mockito.mock;
import static org.powermock.api.mockito.PowerMockito.when;
public class DDTFixture {
private final ResultSet resultSet;
private final Connection connection;
private CallableStatement callableStatement;
private Statement statement;
private PreparedStatement preparedStatement;
public DDTFixture(Connection connection, ResultSet resultSet, Statement statement) {
this(connection, resultSet, mock(PreparedStatement.class), statement, mock(CallableStatement.class));
}
public DDTFixture(Connection connection, ResultSet resultSet, PreparedStatement preparedStatement) {
this(connection, resultSet, preparedStatement, mock(Statement.class), mock(CallableStatement.class));
}
public DDTFixture(Connection connection, ResultSet resultSet, CallableStatement callableStatement) {
this(connection, resultSet, mock(PreparedStatement.class), mock(Statement.class), callableStatement);
}
private DDTFixture(Connection connection, ResultSet resultSet, PreparedStatement preparedStatement, Statement statement, CallableStatement callableStatement) {
this.connection = connection;
this.resultSet = resultSet;
this.preparedStatement = preparedStatement;
this.statement = statement;
this.callableStatement = callableStatement;
wireInStatements();
}
public static DDTFixture forPreparedStatement(Connection connection) {
return new DDTFixture(connection, mock(ResultSet.class), mock(PreparedStatement.class));
}
public static DDTFixture forStatement(Connection connection) {
return new DDTFixture(connection, mock(ResultSet.class), mock(Statement.class));
}
public static DDTFixture forCallableStatement(Connection connection) {
return new DDTFixture(connection, mock(ResultSet.class), mock(CallableStatement.class));
}
private void wireInStatements() {
try {
wireInStatement(connection, resultSet, statement);
wireInPreparedStatement(connection, resultSet, preparedStatement);
wireInCallableStatement(connection, resultSet, callableStatement);
}
catch (SQLException e) {
e.printStackTrace();
throw new RuntimeException("Cannot wire in Statement. Please report this as a bug.");
}
}
private void wireInPreparedStatement(Connection connection, ResultSet resultSet, PreparedStatement preparedStatement) throws SQLException {
when(connection.prepareStatement(anyString())).thenReturn(preparedStatement);
when(connection.prepareStatement(anyString(), anyInt())).thenReturn(preparedStatement);
when(connection.prepareStatement(anyString(), anyInt(), anyInt())).thenReturn(preparedStatement);
when(connection.prepareStatement(anyString(), anyInt(), anyInt(), anyInt())).thenReturn(preparedStatement);
when(connection.prepareStatement(anyString(), any(new int[0].getClass()))).thenReturn(preparedStatement);
when(connection.prepareStatement(anyString(), any(new String[0].getClass()))).thenReturn(preparedStatement);
when(preparedStatement.executeQuery()).thenReturn(resultSet);
when(preparedStatement.executeQuery(anyString())).thenReturn(resultSet);
}
private void wireInStatement(Connection connection, ResultSet resultSet, Statement statement) throws SQLException {
when(connection.createStatement()).thenReturn(statement);
when(connection.createStatement(anyInt(), anyInt())).thenReturn(statement);
when(connection.createStatement(anyInt(), anyInt(), anyInt())).thenReturn(statement);
when(statement.executeQuery(anyString())).thenReturn(resultSet);
}
private void wireInCallableStatement(Connection connection, ResultSet resultSet, CallableStatement callableStatement) throws SQLException {
when(connection.prepareCall(anyString())).thenReturn(callableStatement);
when(connection.prepareCall(anyString(), anyInt(), anyInt())).thenReturn(callableStatement);
when(connection.prepareCall(anyString(), anyInt(), anyInt(), anyInt())).thenReturn(callableStatement);
when(callableStatement.executeQuery()).thenReturn(resultSet);
when(callableStatement.executeQuery(anyString())).thenReturn(resultSet);
}
public void setResultSet(Row... rows) throws SQLException {
final Answer answerFromResultSet = new SyntheticResultSetAnswer(rows);
given(resultSet.next()).will(answerFromResultSet);
given(resultSet.getArray(anyInt())).will(answerFromResultSet);
given(resultSet.getAsciiStream(anyInt())).will(answerFromResultSet);
given(resultSet.getBigDecimal(anyInt())).will(answerFromResultSet);
given(resultSet.getBinaryStream(anyInt())).will(answerFromResultSet);
given(resultSet.getBlob(anyInt())).will(answerFromResultSet);
given(resultSet.getBoolean(anyInt())).will(answerFromResultSet);
given(resultSet.getByte(anyInt())).will(answerFromResultSet);
given(resultSet.getBytes(anyInt())).will(answerFromResultSet);
given(resultSet.getCharacterStream(anyInt())).will(answerFromResultSet);
given(resultSet.getClob(anyInt())).will(answerFromResultSet);
given(resultSet.getDate(anyInt())).will(answerFromResultSet);
given(resultSet.getDouble(anyInt())).will(answerFromResultSet);
given(resultSet.getFloat(anyInt())).will(answerFromResultSet);
given(resultSet.getInt(anyInt())).will(answerFromResultSet);
given(resultSet.getLong(anyInt())).will(answerFromResultSet);
given(resultSet.getNCharacterStream(anyInt())).will(answerFromResultSet);
given(resultSet.getNClob(anyInt())).will(answerFromResultSet);
given(resultSet.getNString(anyInt())).will(answerFromResultSet);
given(resultSet.getObject(anyInt())).will(answerFromResultSet);
given(resultSet.getObject(anyInt(), anyMap())).will(answerFromResultSet);
given(resultSet.getObject(anyInt(), any(Class.class))).will(answerFromResultSet);
given(resultSet.getRef(anyInt())).will(answerFromResultSet);
given(resultSet.getRowId(anyInt())).will(answerFromResultSet);
given(resultSet.getShort(anyInt())).will(answerFromResultSet);
given(resultSet.getSQLXML(anyInt())).will(answerFromResultSet);
given(resultSet.getString(anyInt())).will(answerFromResultSet);
given(resultSet.getTime(anyInt())).will(answerFromResultSet);
given(resultSet.getTime(anyInt(), any(Calendar.class))).will(answerFromResultSet);
given(resultSet.getTimestamp(anyInt())).will(answerFromResultSet);
given(resultSet.getTimestamp(anyInt(), any(Calendar.class))).will(answerFromResultSet);
given(resultSet.getURL(anyInt())).will(answerFromResultSet);
given(resultSet.getBigDecimal(anyInt(), anyInt())).will(answerFromResultSet);
given(resultSet.getUnicodeStream(anyInt())).will(answerFromResultSet);
}
}
|
package jena;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import java.util.ArrayList;
@RestController
@RequestMapping("/statements")
public class JenaTestNodeController {

    /** Statement lookup service; injected by type. */
    @Autowired
    private StatementParserImpl statementParser;

    /**
     * Fetches a single statement by its numeric id.
     *
     * @param id id of the statement to fetch
     * @return the matching statement, serialized as JSON
     */
    @ResponseBody
    @RequestMapping(value = "/{id}", method = RequestMethod.GET, produces = "application/json")
    public JenaStatement getStatement(@PathVariable("id") int id) {
        final JenaStatement statement = statementParser.getStatement(id);
        return statement;
    }
}
|
package moe.banana.jsonapi2;
import java.io.Serializable;
import java.lang.reflect.Array;
import java.util.Iterator;
public final class HasMany<T extends Resource> implements Relationship, Iterable<T>, Serializable {

    // Raw linkages (type/id pairs) of this to-many relationship; may be null.
    public final ResourceLinkage[] linkages;

    private final Class<T> type;
    private final Resource resource;

    HasMany(Class<T> type, Resource resource, ResourceLinkage[] linkages) {
        this.type = type;
        this.resource = resource;
        this.linkages = linkages;
    }

    /**
     * @deprecated use {@link #getAll()} instead.
     */
    @Deprecated
    public T[] get() throws ResourceNotFoundException {
        return getAll();
    }

    /**
     * Resolves every linkage against the enclosing document.
     *
     * @return array of resolved resources; empty when there are no linkages
     * @throws ResourceNotFoundException if a linkage cannot be resolved
     */
    @SuppressWarnings("unchecked")
    public T[] getAll() throws ResourceNotFoundException {
        if (linkages == null) {
            // Be consistent with iterator(), which treats null linkages as empty.
            return (T[]) Array.newInstance(type, 0);
        }
        T[] array = (T[]) Array.newInstance(type, linkages.length);
        for (int i = 0; i != linkages.length; i++) {
            array[i] = (T) resource._doc.find(linkages[i]);
        }
        return array;
    }

    /**
     * Iterates over linked resources.
     * @return iterator whose {@link Iterator#next()} returns linked Resource or null if linkage cannot be resolved with document.
     */
    @Override
    public Iterator<T> iterator() {
        return new Iterator<T>() {
            int i = 0;

            @Override
            public boolean hasNext() {
                return linkages != null && i != linkages.length;
            }

            @Override
            @SuppressWarnings("unchecked")
            public T next() {
                try {
                    return (T) resource._doc.find(linkages[i++]);
                } catch (ResourceNotFoundException e) {
                    return null;
                }
            }
        };
    }

    /**
     * Creates a to-many relationship pointing at the given resources.
     * Safe for generic varargs: the array is only read, never stored.
     */
    @SafeVarargs
    @SuppressWarnings("unchecked")
    public static <T extends Resource> HasMany<T> create(Resource resource, T... linked) {
        ResourceLinkage[] linkages = new ResourceLinkage[linked.length];
        for (int i = 0; i != linkages.length; i++) {
            linkages[i] = ResourceLinkage.of(linked[i]);
        }
        return create(resource, (Class<T>) linked.getClass().getComponentType(), linkages);
    }

    public static HasMany<? extends Resource> create(Resource resource, ResourceLinkage... linkage) {
        return create(resource, Resource.class, linkage);
    }

    public static <T extends Resource> HasMany<T> create(Resource resource, Class<T> componentType, ResourceLinkage... linkage) {
        return new HasMany<>(componentType, resource, linkage);
    }
}
|
package net.imagej.legacy;
import ij.Executer;
import ij.IJ;
import ij.ImageJ;
import ij.ImagePlus;
import ij.Macro;
import ij.Menus;
import ij.WindowManager;
import ij.gui.ImageWindow;
import ij.gui.Toolbar;
import ij.io.Opener;
import ij.macro.Interpreter;
import ij.plugin.Commands;
import ij.plugin.PlugIn;
import ij.plugin.filter.PlugInFilter;
import ij.plugin.filter.PlugInFilterRunner;
import ij.plugin.frame.PlugInDialog;
import ij.plugin.frame.PlugInFrame;
import ij.text.TextWindow;
import java.awt.Component;
import java.awt.Image;
import java.awt.Menu;
import java.awt.MenuBar;
import java.awt.MenuItem;
import java.awt.Window;
import java.awt.image.ImageProducer;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.swing.SwingUtilities;
import net.imagej.display.ImageDisplay;
import net.imagej.patcher.LegacyHooks;
import org.scijava.AbstractContextual;
import org.scijava.Context;
import org.scijava.MenuEntry;
import org.scijava.MenuPath;
import org.scijava.event.EventHandler;
import org.scijava.log.LogService;
import org.scijava.module.ModuleInfo;
import org.scijava.platform.event.AppAboutEvent;
import org.scijava.platform.event.AppOpenFilesEvent;
import org.scijava.platform.event.AppPreferencesEvent;
import org.scijava.platform.event.AppQuitEvent;
import org.scijava.plugin.Parameter;
import org.scijava.script.ScriptService;
import org.scijava.util.ClassUtils;
/**
* A helper class to interact with ImageJ 1.x.
* <p>
* The DefaultLegacyService needs to patch ImageJ 1.x's classes before they are
* loaded. Unfortunately, this is tricky: if the DefaultLegacyService already
* uses those classes, it is a matter of luck whether we can get the patches in
* before those classes are loaded.
* </p>
* <p>
* Therefore, we put as much interaction with ImageJ 1.x as possible into this
* class and keep a reference to it in the DefaultLegacyService.
* </p>
*
* @author Johannes Schindelin
*/
public class IJ1Helper extends AbstractContextual {
/** A reference to the legacy service, just in case we need it */
private final DefaultLegacyService legacyService;
@Parameter
private LogService log;
	/**
	 * Creates a helper bound to the given legacy service. Registers this object
	 * with the service's SciJava context so the {@code @Parameter} fields
	 * (e.g. {@code log}) are injected.
	 */
	public IJ1Helper(final DefaultLegacyService legacyService) {
		setContext(legacyService.getContext());
		this.legacyService = legacyService;
	}
	/**
	 * Initializes the ImageJ 1.x side: runs ij.IJ.init if no commands are
	 * registered yet, registers already-open legacy images with the
	 * {@link LegacyImageMap}, and applies the app name/icon from the installed
	 * {@link LegacyHooks} to the main window (including the OS X dock icon via
	 * reflection). All reflection/IO failures are logged and swallowed so a
	 * cosmetic failure never aborts startup.
	 */
	public void initialize() {
		// initialize legacy ImageJ application
		final ImageJ ij1 = IJ.getInstance();
		if (Menus.getCommands() == null) {
			IJ.runPlugIn("ij.IJ.init", "");
		}
		if (ij1 != null) {
			// make sure that the Event Dispatch Thread's class loader is set
			SwingUtilities.invokeLater(new Runnable() {
				@Override
				public void run() {
					Thread.currentThread().setContextClassLoader(IJ.getClassLoader());
				}
			});
			// Register any images that were opened before this helper existed.
			// NOTE(review): WindowManager image IDs are 1-based here — confirm.
			final LegacyImageMap imageMap = legacyService.getImageMap();
			for (int i = 1; i <= WindowManager.getImageCount(); i++) {
				imageMap.registerLegacyImage(WindowManager.getImage(i));
			}
			// set icon and title of main window (which are instantiated before the
			// initializer is called)
			try {
				// The hooks field is read reflectively; IJ._hooks is presumably
				// installed by the legacy patcher before this runs — confirm.
				final LegacyHooks hooks =
					(LegacyHooks) IJ.class.getField("_hooks").get(null);
				ij1.setTitle(hooks.getAppName());
				final URL iconURL = hooks.getIconURL();
				if (iconURL != null) try {
					final Object producer = iconURL.getContent();
					final Image image = ij1.createImage((ImageProducer) producer);
					ij1.setIconImage(image);
					if (IJ.isMacOSX()) try {
						// NB: We also need to set the dock icon
						final Class<?> clazz = Class.forName("com.apple.eawt.Application");
						final Object app = clazz.getMethod("getApplication").invoke(null);
						clazz.getMethod("setDockIconImage", Image.class).invoke(app, image);
					}
					catch (final Throwable t) {
						// Apple eawt classes may be absent; dock icon is cosmetic only.
						t.printStackTrace();
					}
				}
				catch (final IOException e) {
					IJ.handleException(e);
				}
			}
			catch (final Throwable t) {
				t.printStackTrace();
			}
			// FIXME: handle window location via LegacyUI
			// This is necessary because the ImageJ 1.x window will not set its location
			// if created with mode NO_SHOW, which is exactly how it is created right
			// now by the legacy layer. This is a work-around by ensuring the preferred
			// (e.g. saved and loaded) location is current at the time the IJ1Helper
			// is initialized. Ideally we would like to handle positioning via
			// the LegacyUI though, so that we can restore positions on secondary
			// monitors and such.
			ij1.setLocation(ij1.getPreferredLocation());
		}
	}
/**
 * Shuts down the legacy ImageJ 1.x instance: closes all image windows
 * (without save prompts), closes all non-image windows (so their worker
 * threads terminate), and finally quits ImageJ 1.x on this thread.
 */
public void dispose() {
final ImageJ ij = IJ.getInstance();
if (ij != null) {
Runnable run = new Runnable() {
@Override
public void run() {
// close out all image windows, without dialog prompts
while (true) {
final ImagePlus imp = WindowManager.getCurrentImage();
if (imp == null) break;
// clearing the changes flag suppresses the "save changes?" dialog
imp.changes = false;
imp.close();
}
}
};
// image windows must be closed on the EDT
if (SwingUtilities.isEventDispatchThread()) {
run.run();
} else try {
SwingUtilities.invokeAndWait(run);
} catch (Exception e) {
// report & ignore
e.printStackTrace();
}
// We need to ensure all the non-image windows are closed in ImageJ 1.x.
// This is a non-trivial problem, as WindowManager#getNonImageWindows()
// will ONLY return Frames. However there are non-Frame, non-Image Windows
// that are critical to close: for example, the ContrastAdjuster spawns
// a polling thread to do its work, which will continue to run until the
// ContrastAdjuster is explicitly closed.
// As of v1.49b, getNonImageTitles is not restricted to Frames, so we must
// use titles to iterate through the available windows.
for (String title : WindowManager.getNonImageTitles()) {
// Titles are not unique in IJ1, but since we are iterating through all
// the available Windows, duplicates will each get handled eventually.
final Window win = WindowManager.getWindow(title);
// Copied from ij.plugin.Commands.close. These are subclasses of
// java.awt.Window that added close methods. These methods absolutely
// need to be called - the only reason this is working in ImageJ 1.x
// right now is that ImageJ.exitWhenQuitting is always true, as there
// is nothing setting it to false. So System.exit(0) is called and it
// doesn't matter that there may be unclosed windows or utility threads
// running.
// In ImageJ2 however we need to ensure these windows are properly shut
// down.
// Note that we can NOT set these windows as active and run the Commands
// plugin with argument "close", because the default behavior is to
// try closing the window as an Image. As we know these are not Images,
// that is never the right thing to do.
if (win instanceof PlugInFrame)
((PlugInFrame)win).close();
else if (win instanceof PlugInDialog)
((PlugInDialog)win).close();
else if (win instanceof TextWindow)
((TextWindow)win).close();
// Ensure the WindowManager has removed the current window and it has
// been disposed. This may cause double disposal, but as far as we know
// that is OK.
WindowManager.removeWindow(win);
win.dispose();
}
// quit legacy ImageJ on the same thread
ij.run();
}
}
/**
 * Add name aliases for ImageJ1 classes to the ScriptService.
 *
 * @param scriptService the script service to register the aliases with
 */
public void addAliases(final ScriptService scriptService) {
// currently only ImagePlus is aliased; presumably this lets scripts refer
// to it without the fully qualified name — TODO confirm addAlias semantics
scriptService.addAlias(ImagePlus.class);
}
/**
 * Reports whether the ImageJ 1.x main window exists and is visible.
 *
 * @return {@code true} iff an ImageJ 1.x instance exists and is showing
 */
public boolean isVisible() {
final ImageJ instance = IJ.getInstance();
return instance != null && instance.isVisible();
}
/** Whether the legacy layer runs in batch mode (setVisible then becomes a no-op). */
private boolean batchMode;
/**
 * Sets batch mode both on this helper and on ImageJ 1.x' macro interpreter.
 *
 * @param batch whether to enable batch mode
 */
void setBatchMode(boolean batch) {
Interpreter.batchMode = batch;
batchMode = batch;
}
/**
 * Invalidates the current ImageJ 1.x instance by invoking the private
 * {@code IJ.cleanup()} method via reflection.
 * <p>
 * Failures are reported through the legacy service's log and otherwise
 * ignored, since this is best-effort cleanup.
 * </p>
 */
void invalidateInstance() {
try {
final Method cleanup = IJ.class.getDeclaredMethod("cleanup");
cleanup.setAccessible(true);
cleanup.invoke(null);
} catch (Throwable t) {
// NB: report via the log service only; the previous additional
// printStackTrace() duplicated the report on stderr.
legacyService.log().error(t);
}
}
/**
 * Shows or hides the ImageJ 1.x main window and all legacy image windows.
 * Does nothing while in batch mode.
 *
 * @param toggle whether to show ({@code true}) or hide ({@code false})
 */
public void setVisible(boolean toggle) {
if (batchMode) return;
final ImageJ ij = IJ.getInstance();
if (ij != null) {
// pack before showing so the window gets its proper size
if (toggle) ij.pack();
ij.setVisible(toggle);
}
// hide/show the legacy ImagePlus instances
final LegacyImageMap imageMap = legacyService.getImageMap();
for (final ImagePlus imp : imageMap.getImagePlusInstances()) {
final ImageWindow window = imp.getWindow();
if (window != null) window.setVisible(toggle);
}
}
/**
 * Synchronizes ImageJ 1.x' notion of the current window with the given
 * ImageJ2 display.
 *
 * @param activeDisplay the currently active ImageJ2 display (may be null)
 */
public void syncActiveImage(final ImageDisplay activeDisplay) {
final LegacyImageMap imageMap = legacyService.getImageMap();
final ImagePlus activeImagePlus = imageMap.lookupImagePlus(activeDisplay);
// NB - old way - caused probs with 3d Project
// WindowManager.setTempCurrentImage(activeImagePlus);
// NB - new way - test thoroughly
if (activeImagePlus == null) WindowManager.setCurrentWindow(null);
else WindowManager.setCurrentWindow(activeImagePlus.getWindow());
}
/** Tells ImageJ 1.x that the given key was pressed. */
public void setKeyDown(int keyCode) {
IJ.setKeyDown(keyCode);
}
/** Tells ImageJ 1.x that the given key was released. */
public void setKeyUp(int keyCode) {
IJ.setKeyUp(keyCode);
}
/** Returns whether an ImageJ 1.x instance currently exists. */
public boolean hasInstance() {
return IJ.getInstance() != null;
}
/** Returns the ImageJ 1.x version string. */
public String getVersion() {
return ImageJ.VERSION;
}
/** Returns whether ImageJ 1.x thinks it runs on a Macintosh. */
public boolean isMacintosh() {
return IJ.isMacintosh();
}
/** Shows the given message in ImageJ 1.x' status bar. */
public void setStatus(String message) {
IJ.showStatus(message);
}
/** Updates ImageJ 1.x' progress bar. */
public void setProgress(int val, int max) {
IJ.showProgress(val, max);
}
/** Returns ImageJ 1.x' toolbar component. */
public Component getToolBar() {
return Toolbar.getInstance();
}
/** Returns the ImageJ 1.x instance, or {@code null} if there is none. */
public ImageJ getIJ() {
// NB: hasInstance() merely checks IJ.getInstance() != null, so this is
// equivalent to returning IJ.getInstance() directly
if (hasInstance()) {
return IJ.getInstance();
}
return null;
}
/** Shows a message dialog via ImageJ 1.x. */
public void showMessage(String title, String message) {
IJ.showMessage(title, message);
}
/** Shows an OK/Cancel dialog via ImageJ 1.x; returns {@code true} on OK. */
public boolean showMessageWithCancel(String title, String message) {
return IJ.showMessageWithCancel(title, message);
}
/** Returns the class name of ImageJ 1.x' Commands plugin. */
public String commandsName() {
return Commands.class.getName();
}
/**
 * Moves (or inserts) the given path at the top of ImageJ 1.x'
 * File&gt;Open Recent menu, evicting the oldest entry when full.
 *
 * @param path the file path to record as most recently used
 */
public void updateRecentMenu(final String path) {
Menu menu = Menus.getOpenRecentMenu();
if (menu == null) return;
// look for an existing entry with this path
int n = menu.getItemCount();
int index = -1;
for (int i=0; i<n; i++) {
if (menu.getItem(i).getLabel().equals(path)) {
index = i;
break;
}
}
// Move to most recent
if (index > 0) {
final MenuItem item = menu.getItem(index);
menu.remove(index);
menu.insert(item, 0);
}
// not found, so replace oldest
else if (index < 0) {
int count = menu.getItemCount();
if (count >= Menus.MAX_OPEN_RECENT_ITEMS) {
// evict the oldest entry (last position) to make room
menu.remove(count - 1);
}
final MenuItem item = new MenuItem(path);
final ImageJ instance = IJ.getInstance();
if (instance != null) item.addActionListener(instance);
menu.insert(item, 0);
}
// if index was 0, already at the head so do nothing
}
/**
 * Gets a macro parameter of type <i>boolean</i>.
 * <p>
 * NB: a boolean macro parameter is considered {@code true} as soon as it is
 * present in the options at all; otherwise the default value applies.
 * </p>
 *
 * @param label
 * the name of the macro parameter
 * @param defaultValue
 * the default value
 * @return the boolean value
 */
public static boolean getMacroParameter(String label, boolean defaultValue) {
return getMacroParameter(label) != null || defaultValue;
}
/**
 * Gets a macro parameter of type <i>double</i>.
 *
 * @param label the name of the macro parameter
 * @param defaultValue the value to return when the parameter is absent
 * @return the parsed parameter value, or {@code defaultValue} if missing
 */
public static double getMacroParameter(String label, double defaultValue) {
final String value = Macro.getValue(Macro.getOptions(), label, null);
if (value == null) return defaultValue;
return Double.parseDouble(value);
}
/**
 * Gets a macro parameter of type {@link String}.
 *
 * @param label
 * the name of the macro parameter
 * @param defaultValue
 * the default value
 * @return the value
 */
public static String getMacroParameter(String label, String defaultValue) {
return Macro.getValue(Macro.getOptions(), label, defaultValue);
}
/**
 * Gets a macro parameter of type {@link String}.
 *
 * @param label
 * the name of the macro parameter
 * @return the value, <code>null</code> if the parameter was not specified
 */
public static String getMacroParameter(String label) {
return Macro.getValue(Macro.getOptions(), label, null);
}
/**
 * Gets the SciJava application context linked to the ImageJ 1.x instance.
 *
 * @return the context, or {@code null} if none could be obtained
 * @throws IllegalStateException if the plugin call yields a non-Context object
 */
public static Context getLegacyContext() {
// NB: This call instantiates a Context if there is none.
// IJ.runPlugIn() will be intercepted by the legacy hooks if they are
// installed and return the current Context.
// If no legacy hooks are installed, ImageJ 1.x will instantiate the Context
// using the PluginClassLoader and the LegacyService will install the legacy
// hooks.
final Object o = IJ.runPlugIn("org.scijava.Context", "");
if (o == null) return null;
if (!(o instanceof Context)) {
throw new IllegalStateException("Unexpected type of context: " +
o.getClass().getName());
}
return (Context) o;
}
/**
 * Replacement for ImageJ 1.x' MacAdapter.
 * <p>
 * ImageJ 1.x has a MacAdapter plugin that intercepts MacOSX-specific events
 * and handles them. The way it does it is deprecated now, however, and
 * unfortunately incompatible with the way ImageJ 2's platform service does
 * it.
 * </p>
 * <p>
 * This class implements the same functionality as the MacAdapter, but in a
 * way that is compatible with ImageJ 2's platform service.
 * </p>
 * @author Johannes Schindelin
 */
private static class LegacyEventDelegator extends AbstractContextual {
/** Lazily resolved in isLegacyMode(); may legitimately be absent. */
@Parameter(required = false)
private LegacyService legacyService;
// -- MacAdapter re-implementations --
/** Shows the legacy About dialog when the application's About entry is chosen. */
@EventHandler
private void onEvent(final AppAboutEvent event)
{
if (isLegacyMode()) {
IJ.run("About ImageJ...");
}
}
/** Opens files dropped on / passed to the application via ImageJ 1.x' Opener. */
@EventHandler
private void onEvent(final AppOpenFilesEvent event) {
if (isLegacyMode()) {
final List<File> files = new ArrayList<File>(event.getFiles());
for (final File file : files) {
new Opener().openAndAddToRecent(file.getAbsolutePath());
}
}
}
/** Quits the legacy application when the platform requests it. */
@EventHandler
private void onEvent(final AppQuitEvent event) {
if (isLegacyMode()) {
new Executer("Quit", null); // works with the CommandListener
}
}
/** Points the user to the legacy preferences location. */
@EventHandler
private void onEvent(final AppPreferencesEvent event)
{
if (isLegacyMode()) {
IJ.error("The ImageJ preferences are in the Edit>Options menu.");
}
}
/**
 * Returns whether the legacy mode is active, lazily looking up the
 * LegacyService from the context if it was not injected yet.
 */
private boolean isLegacyMode() {
// We call setContext() indirectly from DefaultLegacyService#initialize,
// therefore legacyService might still be null at this point even if the
// context knows a legacy service now.
if (legacyService == null) {
final Context context = getContext();
if (context != null) legacyService = context.getService(LegacyService.class);
}
return legacyService != null && legacyService.isLegacyMode();
}
}
/** The single delegator subscribed to application events; null when unsubscribed. */
private static LegacyEventDelegator eventDelegator;
/**
 * Subscribes (or, with a null context, unsubscribes) the event delegator
 * to the given SciJava context's application events.
 *
 * @param context the context to subscribe to, or {@code null} to detach
 */
public static void subscribeEvents(final Context context) {
if (context == null) {
eventDelegator = null;
} else {
eventDelegator = new LegacyEventDelegator();
eventDelegator.setContext(context);
// NOTE(review): purpose of this empty status message is not obvious from
// here — presumably it forces ImageJ 1.x initialization; confirm
IJ.showStatus("");
}
}
/**
 * Runs the given ImageJ 1.x plugin class.
 * <p>
 * Supports {@code PlugIn} (instantiated and run with an empty argument) and
 * {@code PlugInFilter} (run via a {@code PlugInFilterRunner} on the current
 * image, offering to unlock a locked image first).
 * </p>
 *
 * @param c the plugin class to run
 * @throws RuntimeException if instantiation/running fails, or if the class
 *           is neither a PlugIn nor a PlugInFilter
 */
static void run(Class<?> c) {
IJ.resetEscape();
if (PlugIn.class.isAssignableFrom(c)) {
try {
final PlugIn plugin = (PlugIn) c.newInstance();
plugin.run("");
} catch (Exception e) {
// rethrow unchanged if already unchecked, otherwise wrap
throw e instanceof RuntimeException ? (RuntimeException) e
: new RuntimeException(e);
}
return;
}
if (PlugInFilter.class.isAssignableFrom(c)) {
try {
final PlugInFilter plugin = (PlugInFilter) c.newInstance();
ImagePlus image = WindowManager.getCurrentImage();
if (image != null && image.isLocked()) {
// NB: fixed missing space after the closing quote in the prompt text
if (!IJ.showMessageWithCancel("Unlock image?", "The image '" + image.getTitle()
+ "' appears to be locked... Unlock?"))
return;
image.unlock();
}
new PlugInFilterRunner(plugin, c.getName(), "");
} catch (Exception e) {
throw e instanceof RuntimeException ? (RuntimeException) e
: new RuntimeException(e);
}
return;
}
throw new RuntimeException("TODO: construct class loader");
}
/** Guards addMenuItems() against running more than once. */
private boolean menuInitialized;
/**
 * Adds legacy-compatible scripts and commands to the ImageJ1 menu structure.
 */
public synchronized void addMenuItems() {
if (menuInitialized) return;
final Map<String, ModuleInfo> modules =
legacyService.getScriptsAndNonLegacyCommands();
@SuppressWarnings("unchecked")
final Hashtable<String, String> ij1Commands = Menus.getCommands();
final ImageJ ij1 = getIJ();
// without an IJ1 instance we only update the command table, not the menus
final IJ1MenuWrapper wrapper = ij1 == null ? null : new IJ1MenuWrapper(ij1);
for (final Entry<String, ModuleInfo> entry : modules.entrySet()) {
final String key = entry.getKey();
final ModuleInfo info = entry.getValue();
final MenuEntry leaf = info.getMenuPath().getLeaf();
// skip modules without a menu location
if (leaf == null) continue;
final MenuPath path = info.getMenuPath();
final String name = leaf.getName();
if (ij1Commands.containsKey(name)) {
// an IJ1 command of the same name exists: reuse its menu item
legacyService.log().info("Overriding " + name
+ "; class: " + info.getDelegateClassName()
+ "; jar: " + ClassUtils.getLocation(info.getDelegateClassName()));
if (wrapper != null) try {
wrapper.create(path, true);
}
catch (final Throwable t) {
legacyService.log().error(t);
}
}
else if (wrapper != null) try {
wrapper.create(path, false);
}
catch (final Throwable t) {
legacyService.log().error(t);
}
// map the menu label to the module key so the command can be dispatched
ij1Commands.put(name, key);
}
menuInitialized = true;
}
/**
 * Helper class for wrapping ImageJ2 menu paths to ImageJ1 {@link Menu}
 * structures, and inserting them into the proper positions of the
 * {@link MenuBar}.
 */
private static class IJ1MenuWrapper {
final ImageJ ij1;
final MenuBar menuBar = Menus.getMenuBar();
/** Cache from menu label to the corresponding (sub)menu. */
final Map<String, Menu> structure = new HashMap<String, Menu>();
private IJ1MenuWrapper(final ImageJ ij1) {
this.ij1 = ij1;
}
/**
 * Creates a {@link MenuItem} matching the structure of the provided path.
 * Expected path structure is:
 * <p>
 * <ul>Level1 > Level2 > ... > Leaf entry</ul>
 * </p>
 * <p>
 * For example, a valid path would be:
 * </p>
 * <p>
 * <ul>Edit > Options > ImageJ2 plugins > Discombobulator</ul>
 * </p>
 *
 * @param path the full menu path of the command
 * @param reuseExisting whether to return an existing item of the same label
 *          (used when overriding an IJ1 command) instead of creating one
 * @return the (possibly pre-existing) menu item for the leaf entry
 */
private MenuItem create(final MenuPath path, final boolean reuseExisting) {
// Find the menu structure where we can insert our command.
// NB: size - 1 is the leaf position, so we want to go to size - 2 to
// find the parent menu location
final Menu menu = getParentMenu(path, path.size() - 2);
final String label = path.getLeaf().getName();
// If we are overriding an item, find the item being overridden
if (reuseExisting) {
for (int i = 0; i < menu.getItemCount(); i++) {
final MenuItem item = menu.getItem(i);
if (label.equals(item.getLabel())) {
return item;
}
}
}
// Otherwise, we are creating a new item
final MenuItem item = new MenuItem(label);
menu.insert(item, getIndex(menu, label));
item.addActionListener(ij1);
return item;
}
/**
 * Helper method to look up special cases for menu weighting
 *
 * @param menu the menu the new entry is inserted into
 * @param label the label of the new entry
 * @return the insertion index within the menu
 */
private int getIndex(Menu menu, String label) {
// Place export sub-menu after import sub-menu
if (menu.getLabel().equals("File") && label.equals("Export")) {
for (int i=0; i<menu.getItemCount(); i++) {
final MenuItem menuItem = menu.getItem(i);
if (menuItem.getLabel().equals("Import")) return i + 1;
}
}
//TODO pass and use actual command weight from IJ2.. maybe?
// No special case: append to end of menu
return menu.getItemCount();
}
/**
 * Recursive helper method to builds the final {@link Menu} structure.
 *
 * @param menuPath the full menu path being materialized
 * @param depth the index within the path to resolve at this recursion level
 * @return the menu corresponding to the path entry at the given depth
 */
private Menu getParentMenu(final MenuPath menuPath, int depth) {
final MenuEntry currentItem = menuPath.get(depth);
final String currentLabel = currentItem.getName();
// Check to see if we already know the menu associated with the desired
// label/path
final Menu cached = structure.get(currentLabel);
if (cached != null) return cached;
// We are at the root of the menu, so see if we have a matching menu
if (depth == 0) {
// Special case check the help menu
if ("Help".equals(currentLabel)) {
final Menu menu = menuBar.getHelpMenu();
structure.put(currentLabel, menu);
return menu;
}
// Check the other menus of the menu bar to see if our desired label
// already exists
for (int i = 0; i < menuBar.getMenuCount(); i++) {
final Menu menu = menuBar.getMenu(i);
if (currentLabel.equals(menu.getLabel())) {
structure.put(currentLabel, menu);
return menu;
}
}
// Didn't find a match so we have to create a new menu entry
final Menu menu = new Menu(currentLabel);
menuBar.add(menu);
structure.put(currentLabel, menu);
return menu;
}
final Menu parent = getParentMenu(menuPath, depth - 1);
// Once the parent of this entry is obtained, we need to check if it
// already contains the current entry.
for (int i = 0; i < parent.getItemCount(); i++) {
final MenuItem item = parent.getItem(i);
if (currentLabel.equals(item.getLabel())) {
if (item instanceof Menu) {
// Found a menu entry that matches our desired label, so return
final Menu menu = (Menu) item;
structure.put(currentLabel, menu);
return menu;
}
// Found a match but it was an existing non-menu item, so our menu
// structure is invalid.
//TODO consider mangling the IJ2 menu name instead...
throw new IllegalArgumentException("Not a menu: " + currentLabel);
}
}
// An existing entry in the parent menu was not found, so we need to
// create a new entry.
final Menu menu = new Menu(currentLabel);
parent.insert(menu, getIndex(parent, menu.getLabel()));
structure.put(currentLabel, menu);
return menu;
}
}
/**
 * Evaluates the specified macro.
 *
 * @param macro the macro to evaluate
 * @return the return value
 */
public String runMacro(final String macro) {
return IJ.runMacro(macro);
}
/**
 * Evaluates the specified macro.
 *
 * @param path the macro file to evaluate
 * @param arg the macro argument
 * @return the return value
 */
public String runMacroFile(final String path, final String arg) {
return IJ.runMacroFile(path, arg);
}
/**
 * Opens an image using ImageJ 1.x.
 *
 * @param path the image file to open
 * @return the image
 */
public Object openImage(final String path) {
return IJ.openImage(path);
}
/**
 * Opens a path using ImageJ 1.x, bypassing the (javassisted) IJ utility
 * class.
 *
 * @param path the image file to open
 */
public void openPathDirectly(final String path) {
// NB: uses a fresh Opener instead of IJ.open(), so the patched IJ class
// is not involved
new Opener().open(path);
}
/**
 * Enables or disables ImageJ 1.x' debug mode.
 *
 * @param debug whether to show debug messages or not
 */
public void setDebugMode(final boolean debug) {
IJ.debugMode = debug;
}
/**
 * Delegate exception handling to ImageJ 1.x.
 *
 * @param e the exception to handle
 */
public void handleException(Throwable e) {
// NB: removed a stray empty statement (double semicolon)
IJ.handleException(e);
}
/**
 * Ask ImageJ 1.x whether it thinks whether the Shift key is held down.
 *
 * @return whether the Shift key is considered <i>down</i>
 */
public boolean shiftKeyDown() {
return IJ.shiftKeyDown();
}
}
|
package org.basex.query;
import static org.basex.query.QueryText.*;
import static org.basex.query.util.Err.*;
import static org.basex.util.Token.*;
import static org.basex.util.ft.FTFlag.*;
import java.io.*;
import java.util.*;
import org.basex.core.*;
import org.basex.io.*;
import org.basex.io.serial.*;
import org.basex.query.expr.*;
import org.basex.query.expr.CmpG.OpG;
import org.basex.query.expr.CmpN.OpN;
import org.basex.query.expr.CmpV.OpV;
import org.basex.query.expr.Expr.*;
import org.basex.query.expr.Context;
import org.basex.query.expr.List;
import org.basex.query.flwor.*;
import org.basex.query.ft.*;
import org.basex.query.ft.FTWords.FTMode;
import org.basex.query.func.*;
import org.basex.query.item.*;
import org.basex.query.item.SeqType.Occ;
import org.basex.query.iter.*;
import org.basex.query.path.*;
import org.basex.query.up.expr.*;
import org.basex.query.util.*;
import org.basex.query.util.format.*;
import org.basex.util.*;
import org.basex.util.ft.*;
import org.basex.util.hash.*;
import org.basex.util.list.*;
public class QueryParser extends InputParser {
/** QName check: URI is mandatory. */
private static final byte[] URICHECK = {};
/** QName check: skip namespace check. */
private static final byte[] SKIPCHECK = {};
/** Reserved function names (sorted). */
private static final byte[][] KEYWORDS = {
NodeType.ATT.string(), NodeType.COM.string(), NodeType.DOC.string(),
NodeType.ELM.string(), AtomType.EMP.string(), FuncType.ANY_FUN.string(),
token(IF), AtomType.ITEM.string(),
NodeType.NSP.string(), NodeType.NOD.string(), NodeType.PI.string(),
token(SCHEMA_ATTRIBUTE), token(SCHEMA_ELEMENT), token(SWITCH),
NodeType.TXT.string(), token(TYPESWITCH)
};
/** Query context. */
final QueryContext ctx;
/** Temporary token builder. */
private final TokenBuilder tok = new TokenBuilder();
/** Modules loaded by the current file. */
private final TokenList modules = new TokenList();
/** Name of current module. */
private QNm module;
/** Alternative error; when set, reported by the argument-less error() call. */
private Err alter;
/** Function name associated with the alternative error (if any). */
private QNm alterFunc;
/** Input position at which the alternative error applies. */
private int ap;
/** Declared serialization options. */
private final StringList serial = new StringList();
/** Declaration flag: default element namespace. */
private boolean declElem;
/** Declaration flag: default function namespace. */
private boolean declFunc;
/** Declaration flag: default collation (set outside this chunk — confirm). */
private boolean declColl;
/** Declaration flag: construction mode (set outside this chunk — confirm). */
private boolean declConstr;
/** Declaration flag: boundary-space policy. */
private boolean declSpaces;
/** Declaration flag: ordering mode. */
private boolean declOrder;
/** Declaration flag: revalidation mode. */
private boolean declReval;
/** Declaration flag: default order for empty sequences. */
private boolean declGreat;
/** Declaration flag: copy-namespaces mode. */
private boolean declPres;
/** Declaration flag: base URI (set outside this chunk — confirm). */
private boolean declBase;
/** Declaration flag: context item (set outside this chunk — confirm). */
private boolean declItem;
/** Declaration flag: variables (set outside this chunk — confirm). */
private boolean declVars;
/** Cached QNames. */
private final ArrayList<QNmCheck> names = new ArrayList<QNmCheck>();
/**
 * Constructor. Parses pre-defined external variable bindings from the
 * {@code BINDINGS} property (a comma-separated list of {@code key=value}
 * pairs; a literal comma within a value is escaped by doubling it).
 * @param in input
 * @param c query context
 * @throws QueryException query exception
 */
public QueryParser(final String in, final QueryContext c) throws QueryException {
super(in);
ctx = c;
// parse pre-defined external variables
final String bind = ctx.context.prop.get(Prop.BINDINGS).trim();
final StringBuilder key = new StringBuilder();
final StringBuilder val = new StringBuilder();
// true while collecting the key (before the '='), false for the value
boolean first = true;
final int sl = bind.length();
for(int s = 0; s < sl; s++) {
final char ch = bind.charAt(s);
if(first) {
if(ch == '=') {
first = false;
} else {
key.append(ch);
}
} else {
if(ch == ',') {
if(s + 1 == sl || bind.charAt(s + 1) != ',') {
// single comma: finish the current binding and start a new one
bind(key, val);
key.setLength(0);
val.setLength(0);
first = true;
continue;
}
// commas are escaped by a second comma
s++;
}
val.append(ch);
}
}
// bind the trailing pair (no-op if the key is empty)
bind(key, val);
}
/**
 * Binds the specified variable to the given value (as an untyped atomic).
 * Empty keys are silently ignored.
 * @param key key
 * @param val value
 * @throws QueryException query exception
 */
private void bind(final StringBuilder key, final StringBuilder val)
throws QueryException {
final String k = key.toString().trim();
if(!k.isEmpty()) ctx.bind(k, new Atm(val.toString()), null);
}
/**
 * Parses the specified query or module.
 * If the specified uri is {@code null}, the query is parsed as main module.
 * @param uri module uri.
 * @return resulting expression, or the name of the module
 * @throws QueryException query exception
 */
public final Expr parse(final byte[] uri) throws QueryException {
file(ctx.sc.baseIO(), ctx.context);
if(!more()) error(QUERYEMPTY);
// checks if the query string contains invalid characters
for(int i = 0; i < il;) {
// only retrieve code points for large character codes (faster)
int cp = input.charAt(i);
// surrogate pairs must be combined before XML validity is checked
final boolean hs = cp >= Character.MIN_HIGH_SURROGATE;
if(hs) cp = input.codePointAt(i);
if(!XMLToken.valid(cp)) {
ip = i;
error(QUERYINV, cp);
}
i += hs ? Character.charCount(cp) : 1;
}
final Expr expr = module(uri);
if(more()) {
// unparsed input remains: report the pending alternative error, or a
// generic "unexpected end" error with the remaining text
if(alter != null) error();
final String rest = rest();
ip++;
if(uri != null) error(MODEXPR, rest);
error(QUERYEND, rest);
}
// completes the parsing step
assignURI(0);
ctx.funcs.check();
ctx.vars.checkUp();
if(ctx.sc.nsElem != null) ctx.sc.ns.add(EMPTY, ctx.sc.nsElem, null);
// set default decimal format
final byte[] empty = new QNm(EMPTY).eqname();
if(ctx.sc.decFormats.get(empty) == null) {
ctx.sc.decFormats.add(empty, new DecFormatter());
}
return expr;
}
/**
 * Parses the specified query and starts with the "Module" rule.
 * If the specified uri is {@code null}, the query is parsed as main module.
 * @param u module uri
 * @return resulting expression
 * @throws QueryException query exception
 */
public final Expr module(final byte[] u) throws QueryException {
try {
Expr expr = null;
versionDecl();
if(u == null) {
// main module: a module declaration is not allowed here
final int i = ip;
if(wsConsumeWs(MODULE, NSPACE, null)) error(MAINMOD);
ip = i;
expr = mainModule();
if(expr == null) {
if(alter != null) error();
else error(EXPREMPTY);
}
} else {
expr = moduleDecl(u);
}
return expr;
} catch(final QueryException ex) {
// attach the current input position to the exception before rethrowing
mark();
ex.pos(this);
throw ex;
}
}
/**
 * Parses the "VersionDecl" rule (e.g. {@code xquery version "3.0";}),
 * setting the XQuery 3.0 flag accordingly.
 * @throws QueryException query exception
 */
private void versionDecl() throws QueryException {
final int i = ip;
if(!wsConsumeWs(XQUERY)) return;
final boolean version = wsConsumeWs(VERSION);
if(version) {
// parse xquery version
final String ver = string(stringLiteral());
if(ver.equals(XQ10)) ctx.xquery3 = false;
else if(eq(ver, XQ11, XQ30)) ctx.xquery3 = true;
else error(XQUERYVER, ver);
}
// parse xquery encoding (ignored, as input always comes in as string)
if((version || ctx.xquery3) && wsConsumeWs(ENCODING)) {
final String enc = string(stringLiteral());
if(!supported(enc)) error(XQUERYENC2, enc);
} else if(!version) {
// no version declaration after all: rewind
ip = i;
return;
}
wsCheck(";");
}
/**
 * Parses the "MainModule" rule.
 * Parses the "Setter" rule.
 * Parses the "QueryBody (= Expr)" rule.
 * @return query expression
 * @throws QueryException query exception
 */
private Expr mainModule() throws QueryException {
prolog1();
prolog2();
return expr();
}
/**
 * Parses the "ModuleDecl" rule.
 * @param u module uri (may be empty)
 * @return name of module
 * @throws QueryException query exception
 */
private QNm moduleDecl(final byte[] u) throws QueryException {
wsCheck(MODULE);
wsCheck(NSPACE);
skipWS();
final byte[] name = ncName(XPNAME);
wsCheck(IS);
final byte[] uri = stringLiteral();
if(uri.length == 0) error(NSMODURI);
module = new QNm(name, uri);
ctx.sc.ns.add(name, uri, info());
skipWS();
check(';');
prolog1();
prolog2();
// check if import and declaration uri match
// skip test if module has not been imported (in this case, URI is empty)
if(u.length != 0 && !eq(u, uri)) error(WRONGMODULE, module.uri(), file);
return module;
}
/**
 * Parses the "Prolog" rule (first part: setters, namespace and module
 * imports, which must precede all other declarations).
 * Parses the "Setter" rule.
 * @throws QueryException query exception
 */
private void prolog1() throws QueryException {
while(true) {
final int i = ip;
if(wsConsumeWs(DECLARE)) {
if(wsConsumeWs(DEFAULT)) {
if(!defaultNamespaceDecl() && !defaultCollationDecl() &&
!emptyOrderDecl() && !(ctx.xquery3 && decimalFormatDecl(true)))
error(DECLINCOMPLETE);
} else if(wsConsumeWs(BOUNDARY_SPACE)) {
boundarySpaceDecl();
} else if(wsConsumeWs(BASE_URI)) {
baseURIDecl();
} else if(wsConsumeWs(CONSTRUCTION)) {
constructionDecl();
} else if(wsConsumeWs(ORDERING)) {
orderingModeDecl();
} else if(wsConsumeWs(REVALIDATION)) {
revalidationDecl();
} else if(wsConsumeWs(COPY_NAMESPACES)) {
copyNamespacesDecl();
} else if(ctx.xquery3 && wsConsumeWs(DECIMAL_FORMAT)) {
decimalFormatDecl(false);
} else if(wsConsumeWs(NSPACE)) {
namespaceDecl();
} else if(wsConsumeWs(FT_OPTION)) {
// full-text option declaration: collect all match options
final FTOpt fto = new FTOpt();
while(ftMatchOption(fto));
ctx.ftOpt().copy(fto);
} else {
// not a prolog1 declaration: rewind and let prolog2 take over
ip = i;
return;
}
} else if(wsConsumeWs(IMPORT)) {
if(wsConsumeWs(SCHEMA)) {
schemaImport();
} else if(wsConsumeWs(MODULE)) {
moduleImport();
} else {
ip = i;
return;
}
} else {
return;
}
// every declaration must be terminated by a semicolon
skipWS();
check(';');
}
}
/**
 * Parses the "Prolog" rule (second part: context item, options, variable
 * and function declarations).
 * @throws QueryException query exception
 */
private void prolog2() throws QueryException {
while(true) {
final int i = ip;
if(!wsConsumeWs(DECLARE)) return;
if(ctx.xquery3 && wsConsumeWs(CONTEXT)) {
contextItemDecl();
} else if(wsConsumeWs(OPTION)) {
optionDecl();
} else if(wsConsumeWs(DEFAULT)) {
// default declarations belong to prolog1 and may not occur here
error(PROLOGORDER);
} else {
// collect annotations preceding a variable or function declaration
final Ann ann = new Ann();
while(true) {
if(wsConsumeWs(UPDATING)) {
addAnnotation(ann, Ann.UPDATING, Empty.SEQ);
} else if(ctx.xquery3 && consume('%')) {
annotation(ann);
} else {
break;
}
}
if(wsConsumeWs(VARIABLE)) {
// variables cannot be updating
if(ann.contains(Ann.UPDATING)) error(UPDATINGVAR);
varDecl(ann);
} else if(wsConsumeWs(FUNCTION)) {
functionDecl(ann);
} else if(!ann.isEmpty()) {
error(VARFUNC);
} else {
ip = i;
return;
}
}
skipWS();
check(';');
}
}
/**
 * Parses the "Annotation" rule: a possibly empty sequence of
 * {@code %}-prefixed annotations.
 * @return the collected annotations
 * @throws QueryException query exception
 */
private Ann annotations() throws QueryException {
final Ann anns = new Ann();
for(;;) {
if(!wsConsume("%")) break;
annotation(anns);
}
skipWS();
return anns;
}
/**
 * Parses a single annotation (name plus optional parenthesized list of
 * literal values) and adds it to the given container.
 * @param ann annotations
 * @throws QueryException query exception
 */
private void annotation(final Ann ann) throws QueryException {
final QNm name = eQName(QNAMEINV, ctx.sc.nsFunc);
final ValueBuilder vb = new ValueBuilder(1);
if(wsConsumeWs(PAR1)) {
// parse comma-separated literal arguments
do {
final Item it = literal();
if(it == null) error(ANNVALUE);
vb.add(it);
} while(wsConsumeWs(COMMA));
wsCheck(PAR2);
}
skipWS();
addAnnotation(ann, name, vb.value());
}
/**
 * Adds a single annotation, rejecting duplicate updating/visibility
 * annotations and names in reserved namespaces.
 * @param ann annotations
 * @param name name
 * @param value value
 * @throws QueryException query exception
 */
private void addAnnotation(final Ann ann, final QNm name, final Value value)
throws QueryException {
if(name.eq(Ann.UPDATING)) {
if(ann.contains(Ann.UPDATING)) error(DUPLUPD);
} else if(name.eq(Ann.PUBLIC) || name.eq(Ann.PRIVATE)) {
// only one visibility modifier allowed
if(ann.contains(Ann.PUBLIC) || ann.contains(Ann.PRIVATE)) error(DUPLVIS);
} else if(NSGlobal.reserved(name.uri())) {
// no global namespaces allowed
error(ANNRES, name);
}
ann.add(name, value);
}
/**
 * Parses the "NamespaceDecl" rule, rejecting duplicate prefix declarations.
 * @throws QueryException query exception
 */
private void namespaceDecl() throws QueryException {
final byte[] pref = ncName(XPNAME);
wsCheck(IS);
final byte[] uri = stringLiteral();
if(ctx.sc.ns.staticURI(pref) != null) error(DUPLNSDECL, pref);
ctx.sc.ns.add(pref, uri, info());
}
/**
 * Parses the "RevalidationDecl" rule. Only {@code skip} is supported;
 * {@code strict} and {@code lax} raise an error.
 * @throws QueryException query exception
 */
private void revalidationDecl() throws QueryException {
if(declReval) error(DUPLREVAL);
declReval = true;
if(wsConsumeWs(STRICT) || wsConsumeWs(LAX)) error(NOREVAL);
wsCheck(SKIP);
}
/**
 * Parses the "BoundarySpaceDecl" rule ({@code preserve} or {@code strip}).
 * @throws QueryException query exception
 */
private void boundarySpaceDecl() throws QueryException {
if(declSpaces) error(DUPLBOUND);
declSpaces = true;
final boolean spaces = wsConsumeWs(PRESERVE);
if(!spaces) wsCheck(STRIP);
ctx.sc.spaces = spaces;
}
/**
 * Parses the "DefaultNamespaceDecl" rule for elements or functions.
 * Binding the reserved XML/XMLNS URIs is rejected.
 * @return true if declaration was found
 * @throws QueryException query exception
 */
private boolean defaultNamespaceDecl() throws QueryException {
final boolean elem = wsConsumeWs(ELEMENT);
if(!elem && !wsConsumeWs(FUNCTION)) return false;
wsCheck(NSPACE);
final byte[] uri = stringLiteral();
if(eq(XMLURI, uri)) error(BINDXMLURI, uri, XML);
if(eq(XMLNSURI, uri)) error(BINDXMLURI, uri, XMLNS);
if(elem) {
if(declElem) error(DUPLNS);
declElem = true;
// an empty URI clears the default element namespace
ctx.sc.nsElem = uri.length == 0 ? null : uri;
} else {
if(declFunc) error(DUPLNS);
declFunc = true;
ctx.sc.nsFunc = uri.length == 0 ? null : uri;
}
return true;
}
/**
 * Parses the "OptionDecl" rule. Handles serialization options (prefix
 * {@code output}, XQuery 3.0 only) and project-specific database options
 * (prefix {@code db}); all other options are ignored.
 * @throws QueryException query exception
 */
private void optionDecl() throws QueryException {
skipWS();
final QNm name = eQName(QNAMEINV, URICHECK);
final byte[] val = stringLiteral();
if(ctx.xquery3 && eq(name.prefix(), OUTPUT)) {
// output declaration
final String key = string(name.local());
// serialization options are only permitted in main modules
if(module != null) error(MODOUT);
if(ctx.serProp == null) ctx.serProp = new SerializerProp();
if(ctx.serProp.get(key) == null) error(OUTWHICH, key);
if(serial.contains(key)) error(OUTDUPL, key);
ctx.serProp.set(key, string(val));
serial.add(key);
} else if(eq(name.prefix(), DB)) {
// project-specific declaration
final String key = string(uc(name.local()));
final Object obj = ctx.context.prop.get(key);
if(obj == null) error(NOOPTION, key);
// cache old value (to be reset after query evaluation)
ctx.globalOpt.put(key, obj);
ctx.dbOptions.put(key, string(val));
}
// ignore unknown options
}
/**
* Parses the "OrderingModeDecl" rule.
* @throws QueryException query exception
*/
private void orderingModeDecl() throws QueryException {
if(declOrder) error(DUPLORD);
declOrder = true;
ctx.sc.ordered = wsConsumeWs(ORDERED);
if(!ctx.sc.ordered) wsCheck(UNORDERED);
}
  /**
   * Parses the "emptyOrderDecl" rule
   * ({@code declare default order empty greatest|least}).
   * @return true if declaration was found
   * @throws QueryException query exception
   */
  private boolean emptyOrderDecl() throws QueryException {
    if(!wsConsumeWs(ORDER)) return false;
    wsCheck(EMPTYORD);
    // only one empty-order declaration is allowed per module
    if(declGreat) error(DUPLORDEMP);
    declGreat = true;
    ctx.sc.orderGreatest = wsConsumeWs(GREATEST);
    // if "greatest" was not found, "least" is mandatory
    if(!ctx.sc.orderGreatest) wsCheck(LEAST);
    return true;
  }
  /**
   * Parses the "copyNamespacesDecl" rule.
   * Parses the "PreserveMode" rule.
   * Parses the "InheritMode" rule.
   * Expects {@code (no-)preserve, (no-)inherit}.
   * @throws QueryException query exception
   */
  private void copyNamespacesDecl() throws QueryException {
    // only one copy-namespaces declaration is allowed per module
    if(declPres) error(DUPLCOPYNS);
    declPres = true;
    ctx.sc.nsPreserve = wsConsumeWs(PRESERVE);
    if(!ctx.sc.nsPreserve) wsCheck(NO_PRESERVE);
    wsCheck(COMMA);
    ctx.sc.nsInherit = wsConsumeWs(INHERIT);
    if(!ctx.sc.nsInherit) wsCheck(NO_INHERIT);
  }
  /**
   * Parses the "DecimalFormatDecl" rule and registers the resulting
   * {@link DecFormatter} in the static context.
   * @param def default flag: if {@code true}, the "decimal-format" keyword
   *   still needs to be consumed and the format is registered under an
   *   empty name
   * @return true if declaration was found
   * @throws QueryException query exception
   */
  private boolean decimalFormatDecl(final boolean def) throws QueryException {
    if(def && !wsConsumeWs(DECIMAL_FORMAT)) return false;
    // use empty name for default declaration
    final QNm name = def ? new QNm() : eQName(QNAMEINV, null);
    // check if format has already been declared
    if(ctx.sc.decFormats.get(name.eqname()) != null) error(DECDUPL);
    // create new format
    final HashMap<String, String> map = new HashMap<String, String>();
    // collect all property declarations; loop until no new property was added
    int n;
    do {
      n = map.size();
      skipWS();
      final String prop = string(ncName(null));
      for(final String s : DECFORMATS) {
        if(!prop.equals(s)) continue;
        // reject duplicate properties
        if(map.get(s) != null) error(DECDUPLPROP, s);
        wsCheck(IS);
        map.put(s, string(stringLiteral()));
        break;
      }
      // first token did not match any known property name
      if(map.isEmpty()) error(NODECLFORM, prop);
    } while(n != map.size());
    // completes the format declaration
    ctx.sc.decFormats.add(name.eqname(), new DecFormatter(info(), map));
    return true;
  }
  /**
   * Parses the "DefaultCollationDecl" rule. Only the codepoint collation
   * ({@code URLCOLL}) is supported.
   * @return true if declaration was found
   * @throws QueryException query exception
   */
  private boolean defaultCollationDecl() throws QueryException {
    if(!wsConsumeWs(COLLATION)) return false;
    // only one collation declaration is allowed per module
    if(declColl) error(DUPLCOLL);
    declColl = true;
    // resolve the collation URI against the static base URI
    final byte[] cl = ctx.sc.baseURI().resolve(Uri.uri(stringLiteral())).string();
    if(!eq(URLCOLL, cl)) error(COLLWHICH, cl);
    return true;
  }
  /**
   * Parses the "BaseURIDecl" rule and assigns the URI to the static context.
   * @throws QueryException query exception
   */
  private void baseURIDecl() throws QueryException {
    // only one base-uri declaration is allowed per module
    if(declBase) error(DUPLBASE);
    declBase = true;
    final byte[] base = stringLiteral();
    // an empty string leaves the current base URI untouched
    if(base.length != 0) ctx.sc.baseURI(string(base));
  }
  /**
   * Parses the "SchemaImport" rule.
   * Parses the "SchemaPrefix" rule.
   * Schema import is not supported: the rule is parsed completely for better
   * error reporting, and an {@code IMPLSCHEMA} error is always raised.
   * @throws QueryException query exception
   */
  private void schemaImport() throws QueryException {
    if(wsConsumeWs(NSPACE)) {
      ncName(XPNAME);
      wsCheck(IS);
    } else if(wsConsumeWs(DEFAULT)) {
      wsCheck(ELEMENT);
      wsCheck(NSPACE);
    }
    final byte[] ns = stringLiteral();
    if(ns.length == 0) error(NSEMPTY);
    // consume optional "at" location hints
    if(wsConsumeWs(AT)) {
      do stringLiteral(); while(wsConsumeWs(COMMA));
    }
    // schema import is unsupported: always raise an error
    error(IMPLSCHEMA);
  }
  /**
   * Parses the "ModuleImport" rule. Resolves the module via explicit
   * locations, pre-declared files, built-in modules, or the module loader.
   * @throws QueryException query exception
   */
  private void moduleImport() throws QueryException {
    byte[] ns = EMPTY;
    if(wsConsumeWs(NSPACE)) {
      ns = ncName(XPNAME);
      wsCheck(IS);
    }
    final byte[] uri = trim(stringLiteral());
    if(uri.length == 0) error(NSMODURI);
    // reject a second import of the same module URI
    if(modules.contains(uri)) error(DUPLMODULE, uri);
    modules.add(uri);
    // add non-default namespace
    if(ns != EMPTY) ctx.sc.ns.add(ns, uri, info());
    try {
      // check modules at specified locations
      if(wsConsumeWs(AT)) {
        do {
          module(stringLiteral(), uri);
        } while(wsConsumeWs(COMMA));
        return;
      }
      // check pre-declared module files
      final byte[] path = ctx.modDeclared.get(uri);
      if(path != null) {
        module(path, uri);
        return;
      }
      // check built-in modules
      for(final byte[] u : MODULES) if(eq(uri, u)) return;
      // resolve module uri
      if(ctx.modules.addImport(uri, info(), this)) return;
      error(NOMODULE, uri);
    } catch(final StackOverflowError ex) {
      // deep recursion indicates circular module imports
      Util.debug(ex);
      error(CIRCMODULE);
    }
  }
  /**
   * Parses the specified module: reads the file and recursively parses its
   * contents with a fresh static context whose base URI is the module path.
   * @param path file path
   * @param uri module uri
   * @throws QueryException query exception
   */
  public void module(final byte[] path, final byte[] uri) throws QueryException {
    // skip modules that have already been parsed, but verify the expected uri
    final byte[] u = ctx.modParsed.get(path);
    if(u != null) {
      if(!eq(uri, u)) error(WRONGMODULE, uri, path);
      return;
    }
    ctx.modParsed.add(path, uri);
    // check specified path and path relative to query file
    final IO io = ctx.sc.io(string(path));
    String qu = null;
    try {
      qu = string(io.read());
    } catch(final IOException ex) {
      // only expose the full file path to admin users
      error(NOMODULEFILE, ctx.context.user.has(Perm.ADMIN) ?
          io.path() : io.name());
    }
    // parse the module with its own static context, then restore the old one
    final StaticContext sc = ctx.sc;
    ctx.sc = new StaticContext();
    ctx.sc.baseURI(io.path());
    new QueryParser(qu, ctx).parse(uri);
    ctx.sc = sc;
  }
  /**
   * Parses the "ContextItemDecl" rule ({@code declare context item ...}).
   * @throws QueryException query exception
   */
  private void contextItemDecl() throws QueryException {
    wsCheck(ITEMM);
    // only one context item declaration is allowed
    if(declItem) error(DUPLITEM);
    declItem = true;
    // context item declarations are only allowed in main modules
    if(module != null) error(DECITEM);
    final SeqType st = optAsType();
    // the declared type must not be empty-sequence()
    if(st != null && st.type == AtomType.EMP) error(NOTYPE, st);
    ctx.sc.initType = st;
    if(!wsConsumeWs(EXTERNAL)) wsCheck(ASSIGN);
    // external declaration without default value: nothing to bind
    else if(!wsConsumeWs(ASSIGN)) return;
    ctx.ctxItem = check(single(), NOVARDECL);
    // the initializer expression must not be updating
    if(ctx.ctxItem.uses(Use.UPD)) UPCTX.thrw(info(), ctx.ctxItem);
  }
  /**
   * Parses the "VarDecl" rule and registers the global variable.
   * @param ann annotations
   * @throws QueryException query exception
   */
  private void varDecl(final Ann ann) throws QueryException {
    final Var v = typedVar(ann);
    // in library modules, variables must live in the module namespace
    if(module != null && !eq(v.name.uri(), module.uri())) error(MODNS, v);
    // check if variable has already been declared
    final Var old = ctx.vars.get(v.name);
    // throw no error if a variable has been externally bound
    if(old != null && old.declared) error(VARDEFINE, old);
    (old != null ? old : v).declared = true;
    if(wsConsumeWs(EXTERNAL)) {
      // bind value with new type
      if(old != null && v.type != null) old.reset(v.type, ctx);
      // bind default value (XQuery 3.0 only)
      if(ctx.xquery3 && wsConsumeWs(ASSIGN)) {
        v.bind(check(single(), NOVARDECL), ctx);
      }
    } else {
      // non-external variables require an initializing expression
      wsCheck(ASSIGN);
      v.bind(check(single(), NOVARDECL), ctx);
    }
    // bind variable if not done yet
    if(old == null) ctx.vars.updateGlobal(v);
  }
  /**
   * Parses a variable declaration with optional type
   * ({@code $name} optionally followed by {@code as Type}).
   * @param ann annotations, may be {@code null}
   * @return parsed variable
   * @throws QueryException query exception
   */
  private Var typedVar(final Ann ann) throws QueryException {
    return Var.create(ctx, info(), varName(), optAsType(), ann);
  }
/**
* Parses an optional SeqType declaration.
* @return type if preceded by {@code as}, {@code null} otherwise
* @throws QueryException query exception
*/
private SeqType optAsType() throws QueryException {
return wsConsumeWs(AS) ? sequenceType() : null;
}
  /**
   * Parses the "ConstructionDecl" rule
   * ({@code declare construction strip|preserve}).
   * @throws QueryException query exception
   */
  private void constructionDecl() throws QueryException {
    // only one construction declaration is allowed per module
    if(declConstr) error(DUPLCONS);
    declConstr = true;
    ctx.sc.strip = wsConsumeWs(STRIP);
    // if "strip" was not found, "preserve" is mandatory
    if(!ctx.sc.strip) wsCheck(PRESERVE);
  }
  /**
   * Parses the "FunctionDecl" rule and registers the user-defined function.
   * @param ann annotations
   * @throws QueryException query exception
   */
  private void functionDecl(final Ann ann) throws QueryException {
    final QNm name = eQName(FUNCNAME, ctx.sc.nsFunc);
    // function names must not collide with reserved keywords
    if(keyword(name)) error(RESERVED, name);
    // in library modules, functions must live in the module namespace
    if(module != null && !eq(name.uri(), module.uri())) error(MODNS, name);
    wsCheck(PAR1);
    // cache the current variable scope; parameters are scoped to the body
    final VarStack vl = ctx.vars.cache(4);
    final Var[] args = paramList();
    wsCheck(PAR2);
    final UserFunc func = new UserFunc(info(), name, args, optAsType(), ann, true);
    if(func.updating) ctx.updating(true);
    ctx.funcs.add(func, info());
    // external functions have no body
    if(!wsConsumeWs(EXTERNAL)) func.expr = enclosed(NOFUNBODY);
    // restore the outer variable scope
    ctx.vars.reset(vl);
  }
/**
* Checks if the specified name equals reserved function names.
* @param name name to be checked
* @return result of check
*/
private static boolean keyword(final QNm name) {
if(name.hasPrefix()) return false;
final byte[] str = name.string();
for(final byte[] key : KEYWORDS) if(eq(key, str)) return true;
return false;
}
  /**
   * Parses a ParamList: a comma-separated list of typed {@code $variables},
   * adding each parameter to the current variable scope.
   * @return declared variables
   * @throws QueryException query exception
   */
  private Var[] paramList() throws QueryException {
    Var[] args = { };
    skipWS();
    while(true) {
      if(curr() != '$') {
        // an empty parameter list is allowed; after a comma, '$' is mandatory
        if(args.length == 0) break;
        check('$');
      }
      final Var var = typedVar(null);
      ctx.vars.add(var);
      // reject duplicate parameter names
      for(final Var v : args)
        if(v.name.eq(var.name)) error(FUNCDUPL, var);
      args = Array.add(args, var);
      if(!consume(',')) break;
      skipWS();
    }
    return args;
  }
  /**
   * Parses the "EnclosedExpr" rule: an expression wrapped in curly braces.
   * @param err error message to raise if the inner expression is missing
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr enclosed(final Err err) throws QueryException {
    wsCheck(BRACE1);
    final Expr e = check(expr(), err);
    wsCheck(BRACE2);
    return e;
  }
  /**
   * Parses the "Expr" rule: one or more single expressions, separated by
   * commas and combined into a sequence.
   * @return query expression, or {@code null} if no expression was found
   * @throws QueryException query exception
   */
  private Expr expr() throws QueryException {
    final Expr e = single();
    if(e == null) {
      // nothing was parsed: only an error if input remains
      if(more()) return null;
      if(alter != null) error();
      else error(NOEXPR);
    }
    if(!wsConsume(COMMA)) return e;
    // a comma was found: collect all sequence items
    final ExprList el = new ExprList(e);
    do add(el, single()); while(wsConsume(COMMA));
    return new List(info(), el.finish());
  }
  /**
   * Parses the "ExprSingle" rule by trying each alternative in order;
   * the generic "or" expression serves as the final fallback.
   * @return query expression, or {@code null} if no expression was found
   * @throws QueryException query exception
   */
  private Expr single() throws QueryException {
    // reset the alternative error message before each attempt
    alter = null;
    Expr e = flwor();
    if(e == null) e = quantified();
    if(e == null) e = switchh();
    if(e == null) e = typeswitch();
    if(e == null) e = iff();
    if(e == null) e = tryCatch();
    if(e == null) e = insert();
    if(e == null) e = deletee();
    if(e == null) e = rename();
    if(e == null) e = replace();
    if(e == null) e = transform();
    if(e == null) e = or();
    return e;
  }
  /**
   * Parses the "FLWORExpr" rule.
   * Parses the "WhereClause" rule.
   * Parses the "OrderByClause" rule.
   * Parses the "OrderSpecList" rule.
   * Parses the "GroupByClause" rule.
   * @return query expression, or {@code null} if no for/let clause was found
   * @throws QueryException query exception
   */
  private Expr flwor() throws QueryException {
    // XQuery30: tumbling window, sliding window, count, allowing empty
    // (still to be parsed and implemented)
    final int s = ctx.vars.size();
    final ForLet[] fl = forLet();
    if(fl == null) return null;
    Expr where = null;
    if(wsConsumeWs(WHERE)) {
      ap = ip;
      where = check(single(), NOWHERE);
      alter = NOWHERE;
    }
    Group group = null;
    if(ctx.xquery3 && wsConsumeWs(GROUP)) {
      wsCheck(BY);
      ap = ip;
      Group.Spec[] grp = null;
      do grp = groupSpec(fl, grp); while(wsConsume(COMMA));
      // find all non-grouping variables that aren't shadowed
      final ArrayList<Var> ng = new ArrayList<Var>();
      final TokenSet set = new TokenSet();
      for(final Group.Spec spec : grp) set.add(spec.grp.name.eqname());
      for(int i = fl.length; --i >= 0;) {
        for(final Var v : fl[i].vars()) {
          final byte[] eqn = v.name.eqname();
          if(!set.contains(eqn)) {
            ng.add(v);
            set.add(eqn);
          }
        }
      }
      // add new copies for all non-grouping variables
      final Var[] ngrp = new Var[ng.size()];
      for(int i = ng.size(); --i >= 0;) {
        final Var v = ng.get(i);
        // if one groups variables such as $x as xs:integer, then the resulting
        // sequence isn't compatible with the type and can't be assigned
        ngrp[i] = Var.create(ctx, info(), v.name, v.type != null
            && v.type.one() ? SeqType.get(v.type.type, Occ.ONE_MORE) : null, null);
        ctx.vars.add(ngrp[i]);
      }
      group = new Group(grp[0].info, grp,
          new Var[][]{ ng.toArray(new Var[ng.size()]), ngrp });
      alter = GRPBY;
    }
    Order order = null;
    final boolean stable = wsConsumeWs(STABLE);
    // "stable" must be followed by "order"
    if(stable) wsCheck(ORDER);
    if(stable || wsConsumeWs(ORDER)) {
      wsCheck(BY);
      ap = ip;
      OrderBy[] ob = null;
      do ob = orderSpec(ob); while(wsConsume(COMMA));
      // don't sort if all order-by clauses are empty
      if(ob != null) {
        ob = Array.add(ob, new OrderByStable(info()));
        order = new Order(ob[0].info, ob);
      }
      alter = ORDERBY;
    }
    // "return" is mandatory; pick the most helpful error message
    if(!wsConsumeWs(RETURN)) {
      if(alter != null) error();
      error(where == null ? FLWORWHERE : order == null ? FLWORORD : FLWORRET);
    }
    final Expr ret = check(single(), NORETURN);
    // remove all variables declared by this expression from the scope
    ctx.vars.size(s);
    return GFLWOR.get(fl, where, order, group, ret, info());
  }
  /**
   * Parses the "ForClause" rule.
   * Parses the "PositionalVar" rule.
   * Parses the "LetClause" rule.
   * Parses the "FTScoreVar" rule.
   * Collects an arbitrary sequence of for/let clauses.
   * @return parsed clauses, or {@code null} if none were found
   * @throws QueryException query exception
   */
  private ForLet[] forLet() throws QueryException {
    ForLet[] fl = null;
    boolean comma = false;
    while(true) {
      final boolean fr = wsConsumeWs(FOR, DOLLAR, NOFOR);
      // "let score $v" binds a full-text score variable
      boolean score = !fr && wsConsumeWs(LET, SCORE, NOLET);
      if(score) wsCheck(SCORE);
      else if(!fr && !wsConsumeWs(LET, DOLLAR, NOLET)) return fl;
      do {
        // after a comma within a let clause, "score" may occur again
        if(comma && !fr) score = wsConsumeWs(SCORE);
        final QNm name = varName();
        // score variables are implicitly typed as xs:double
        final SeqType type = score ? SeqType.DBL : optAsType();
        final Var var = Var.create(ctx, info(), name, type, null);
        // optional positional ("at") and score variables, for clauses only
        final Var ps = fr && wsConsumeWs(AT) ? Var.create(ctx, info(),
            varName(), SeqType.ITR, null) : null;
        final Var sc = fr && wsConsumeWs(SCORE) ? Var.create(ctx, info(),
            varName(), SeqType.DBL, null) : null;
        wsCheck(fr ? IN : ASSIGN);
        final Expr e = check(single(), NOVARDECL);
        ctx.vars.add(var);
        // the main, positional and score variables must have distinct names
        if(ps != null) {
          if(name.eq(ps.name)) error(DUPLVAR, var);
          ctx.vars.add(ps);
        }
        if(sc != null) {
          if(name.eq(sc.name)) error(DUPLVAR, var);
          if(ps != null && ps.name.eq(sc.name)) error(DUPLVAR, ps);
          ctx.vars.add(sc);
        }
        fl = fl == null ? new ForLet[1] : Arrays.copyOf(fl, fl.length + 1);
        fl[fl.length - 1] = fr ? new For(info(), e, var, ps, sc) : new Let(
            info(), e, var, score);
        score = false;
        comma = true;
      } while(wsConsume(COMMA));
      comma = false;
    }
  }
  /**
   * Parses the "OrderSpec" rule.
   * Parses the "OrderModifier" rule.
   *
   * Empty order specs are ignored, {@code order} is then returned unchanged.
   * @param order order array
   * @return new order array
   * @throws QueryException query exception
   */
  private OrderBy[] orderSpec(final OrderBy[] order) throws QueryException {
    final Expr e = check(single(), ORDERBY);
    boolean desc = false;
    if(!wsConsumeWs(ASCENDING)) desc = wsConsumeWs(DESCENDING);
    // empty-order default comes from the static context
    boolean least = !ctx.sc.orderGreatest;
    if(wsConsumeWs(EMPTYORD)) {
      least = !wsConsumeWs(GREATEST);
      if(least) wsCheck(LEAST);
    }
    // only the codepoint collation is supported
    if(wsConsumeWs(COLLATION)) {
      final byte[] coll = stringLiteral();
      if(!eq(URLCOLL, coll)) error(WHICHCOLL, coll);
    }
    if(e.isEmpty()) return order;
    final OrderBy ord = new OrderByExpr(info(), e, desc, least);
    return order == null ? new OrderBy[] { ord } : Array.add(order, ord);
  }
  /**
   * Parses the "GroupingSpec" rule and appends the spec to the given array.
   * @param fl for/let clauses of the enclosing FLWOR expression
   * @param group grouping specification, or {@code null} for the first spec
   * @return new group array
   * @throws QueryException query exception
   */
  private Group.Spec[] groupSpec(final ForLet[] fl, final Group.Spec[] group)
      throws QueryException {
    final InputInfo ii = info();
    final QNm name = varName();
    final SeqType type = optAsType();
    final Var var = Var.create(ctx, ii, name, type, null);
    final Expr by;
    if(type != null || wsConsume(ASSIGN)) {
      // explicit grouping expression: "$v (as T)? := expr"
      if(type != null) wsCheck(ASSIGN);
      by = check(single(), NOVARDECL);
    } else {
      // no expression: group by an existing variable reference
      final Var v = checkVar(var.name, GVARNOTDEFINED);
      // the grouping variable has to be declared by the same FLWOR expression
      boolean dec = false;
      for(final ForLet f : fl) {
        if(f.declares(v)) {
          dec = true;
          break;
        }
      }
      if(!dec) throw error(GVARNOTDEFINED, v);
      by = new VarRef(ii, v);
    }
    // only the codepoint collation is supported
    if(wsConsumeWs(COLLATION)) {
      final byte[] coll = stringLiteral();
      if(!eq(URLCOLL, coll)) throw error(WHICHCOLL, coll);
    }
    // add the new grouping var
    ctx.vars.add(var);
    final Group.Spec grp = new Group.Spec(ii, var, by);
    return group == null ? new Group.Spec[] { grp } : Array.add(group, grp);
  }
  /**
   * Parses the "QuantifiedExpr" rule ({@code some/every $v in ... satisfies ...}).
   * @return query expression, or {@code null} if no quantifier was found
   * @throws QueryException query exception
   */
  private Expr quantified() throws QueryException {
    final boolean some = wsConsumeWs(SOME, DOLLAR, NOSOME);
    if(!some && !wsConsumeWs(EVERY, DOLLAR, NOSOME)) return null;
    final int s = ctx.vars.size();
    For[] fl = { };
    do {
      final Var var = typedVar(null);
      wsCheck(IN);
      final Expr e = check(single(), NOSOME);
      ctx.vars.add(var);
      fl = Array.add(fl, new For(info(), e, var));
    } while(wsConsume(COMMA));
    wsCheck(SATISFIES);
    final Expr e = check(single(), NOSOME);
    // remove quantifier variables from the scope again
    ctx.vars.size(s);
    // "every" is modelled as a negated "some" quantifier
    return new Quantifier(info(), fl, e, !some);
  }
  /**
   * Parses the "SwitchExpr" rule (XQuery 3.0 only).
   * @return query expression, or {@code null} if no switch was found
   * @throws QueryException query exception
   */
  private Expr switchh() throws QueryException {
    if(!ctx.xquery3 || !wsConsumeWs(SWITCH, PAR1, TYPEPAR)) return null;
    wsCheck(PAR1);
    final Expr expr = check(expr(), NOSWITCH);
    SwitchCase[] exprs = { };
    wsCheck(PAR2);
    // collect all cases; slot 0 of each list holds the return expression,
    // remaining slots hold the case operands (empty for the default branch)
    ExprList cases;
    do {
      cases = new ExprList(null);
      while(wsConsumeWs(CASE)) add(cases, single());
      if(cases.size() == 1) {
        // no case operand found: this must be the final default branch
        if(exprs.length == 0) error(WRONGCHAR, CASE, found());
        wsCheck(DEFAULT);
      }
      wsCheck(RETURN);
      cases.set(0, single());
      exprs = Array.add(exprs, new SwitchCase(info(), cases.finish()));
    } while(cases.size() != 1);
    return new Switch(info(), expr, exprs);
  }
  /**
   * Parses the "TypeswitchExpr" rule.
   * @return query expression, or {@code null} if no typeswitch was found
   * @throws QueryException query exception
   */
  private Expr typeswitch() throws QueryException {
    if(!wsConsumeWs(TYPESWITCH, PAR1, TYPEPAR)) return null;
    wsCheck(PAR1);
    final Expr ts = check(expr(), NOTYPESWITCH);
    wsCheck(PAR2);
    TypeCase[] cases = { };
    final int s = ctx.vars.size();
    boolean cs;
    do {
      cs = wsConsumeWs(CASE);
      // the final branch must be "default"
      if(!cs) wsCheck(DEFAULT);
      skipWS();
      QNm name = null;
      // each branch may optionally bind the matched item to a variable
      if(curr('$')) {
        name = varName();
        if(cs) wsCheck(AS);
      }
      final Var v = Var.create(ctx, info(), name, cs ? sequenceType() : null, null);
      if(name != null) ctx.vars.add(v);
      wsCheck(RETURN);
      final Expr ret = check(single(), NOTYPESWITCH);
      cases = Array.add(cases, new TypeCase(info(), v, ret));
      // the case variable is only visible within its own branch
      ctx.vars.size(s);
    } while(cs);
    // at least one "case" branch is required besides "default"
    if(cases.length == 1) error(NOTYPESWITCH);
    return new TypeSwitch(info(), ts, cases);
  }
/**
* Parses the "IfExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr iff() throws QueryException {
if(!wsConsumeWs(IF, PAR1, IFPAR)) return null;
wsCheck(PAR1);
final Expr iff = check(expr(), NOIF);
wsCheck(PAR2);
if(!wsConsumeWs(THEN)) error(NOIF);
final Expr thn = check(single(), NOIF);
if(!wsConsumeWs(ELSE)) error(NOIF);
final Expr els = check(single(), NOIF);
return new If(info(), iff, thn, els);
}
/**
* Parses the "OrExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr or() throws QueryException {
final Expr e = and();
if(!wsConsumeWs(OR)) return e;
final ExprList el = new ExprList(e);
do add(el, and()); while(wsConsumeWs(OR));
return new Or(info(), el.finish());
}
/**
* Parses the "AndExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr and() throws QueryException {
final Expr e = comparison();
if(!wsConsumeWs(AND)) return e;
final ExprList el = new ExprList(e);
do add(el, comparison()); while(wsConsumeWs(AND));
return new And(info(), el.finish());
}
  /**
   * Parses the "ComparisonExpr" rule: value, node, or general comparisons.
   * The operator sets are probed in this order because their names overlap
   * (e.g. "&lt;" is a prefix of "&lt;&lt;").
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr comparison() throws QueryException {
    final Expr e = ftContains();
    if(e != null) {
      for(final OpV c : OpV.values()) if(wsConsumeWs(c.name))
        return new CmpV(e, check(ftContains(), CMPEXPR), c, info());
      for(final OpN c : OpN.values()) if(wsConsumeWs(c.name))
        return new CmpN(e, check(ftContains(), CMPEXPR), c, info());
      for(final OpG c : OpG.values()) if(wsConsumeWs(c.name))
        return new CmpG(e, check(ftContains(), CMPEXPR), c, info());
    }
    return e;
  }
  /**
   * Parses the "FTContainsExpr" rule ({@code expr contains text selection}).
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr ftContains() throws QueryException {
    final Expr e = stringConcat();
    final int i = ip;
    // use "=>" and "<-" as unofficial shortcuts for full-text expressions
    if(consume('=') && consume('>') || consume('<') && consume('-')) {
      skipWS();
    } else if(!wsConsumeWs(CONTAINS) || !wsConsumeWs(TEXT)) {
      // no full-text operator: restore position and return plain expression
      ip = i;
      return e;
    }
    final FTExpr select = ftSelection(false);
    // the "without content" option is parsed but not supported
    if(wsConsumeWs(WITHOUT)) {
      wsCheck(CONTENT);
      union();
      error(FTIGNORE);
    }
    return new FTContains(e, select, info());
  }
  /**
   * Parses the "StringConcatExpr" rule: operands joined by the "||" operator,
   * mapped to an fn:concat call.
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr stringConcat() throws QueryException {
    final Expr e = range();
    if(e == null || !consume(CONCAT)) return e;
    final ExprList el = new ExprList(e);
    do add(el, range()); while(wsConsume(CONCAT));
    return Function.CONCAT.get(info(), el.finish());
  }
/**
* Parses the "RangeExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr range() throws QueryException {
final Expr e = additive();
if(!wsConsumeWs(TO)) return e;
return new Range(info(), e, check(additive(), INCOMPLETE));
}
  /**
   * Parses the "AdditiveExpr" rule: left-associative "+" and "-" chains.
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr additive() throws QueryException {
    Expr e = multiplicative();
    while(e != null) {
      final Calc c = consume('+') ? Calc.PLUS : consume('-') ? Calc.MINUS : null;
      if(c == null) break;
      // fold the next operand into a left-associative Arith tree
      e = new Arith(info(), e, check(multiplicative(), CALCEXPR), c);
    }
    return e;
  }
  /**
   * Parses the "MultiplicativeExpr" rule: left-associative "*", "div",
   * "idiv" and "mod" chains.
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr multiplicative() throws QueryException {
    Expr e = union();
    while(e != null) {
      final Calc c = consume('*') ? Calc.MULT : wsConsumeWs(DIV) ? Calc.DIV
          : wsConsumeWs(IDIV) ? Calc.IDIV : wsConsumeWs(MOD) ? Calc.MOD : null;
      if(c == null) break;
      // fold the next operand into a left-associative Arith tree
      e = new Arith(info(), e, check(union(), CALCEXPR), c);
    }
    return e;
  }
  /**
   * Parses the "UnionExpr" rule: operands joined by "union" or "|".
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr union() throws QueryException {
    final Expr e = intersect();
    if(e == null || !isUnion()) return e;
    final ExprList el = new ExprList(e);
    do add(el, intersect()); while(isUnion());
    return new Union(info(), el.finish());
  }
  /**
   * Checks if a union operator is found: either the "union" keyword or a
   * single "|" (a double "||" is the string concatenation operator).
   * @return result of check
   * @throws QueryException query exception
   */
  private boolean isUnion() throws QueryException {
    if(wsConsumeWs(UNION)) return true;
    final int i = ip;
    if(consume(PIPE) && !consume(PIPE)) return true;
    // not a union operator: restore the parser position
    ip = i;
    return false;
  }
  /**
   * Parses the "IntersectExceptExpr" rule. Note that consecutive operators
   * of one kind are collected into a single expression.
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr intersect() throws QueryException {
    final Expr e = instanceoff();
    if(wsConsumeWs(INTERSECT)) {
      final ExprList el = new ExprList(e);
      do add(el, instanceoff()); while(wsConsumeWs(INTERSECT));
      return new InterSect(info(), el.finish());
    }
    if(wsConsumeWs(EXCEPT)) {
      final ExprList el = new ExprList(e);
      do add(el, instanceoff()); while(wsConsumeWs(EXCEPT));
      return new Except(info(), el.finish());
    }
    return e;
  }
/**
* Parses the "InstanceofExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr instanceoff() throws QueryException {
final Expr e = treat();
if(!wsConsumeWs(INSTANCE)) return e;
wsCheck(OF);
return new Instance(info(), e, sequenceType());
}
/**
* Parses the "TreatExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr treat() throws QueryException {
final Expr e = castable();
if(!wsConsumeWs(TREAT)) return e;
wsCheck(AS);
return new Treat(info(), e, sequenceType());
}
/**
* Parses the "CastableExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr castable() throws QueryException {
final Expr e = cast();
if(!wsConsumeWs(CASTABLE)) return e;
wsCheck(AS);
return new Castable(info(), e, simpleType());
}
/**
* Parses the "CastExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr cast() throws QueryException {
final Expr e = unary();
if(!wsConsumeWs(CAST)) return e;
wsCheck(AS);
return new Cast(info(), e, simpleType());
}
  /**
   * Parses the "UnaryExpr" rule: any number of leading "+" and "-" signs
   * followed by a value expression.
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr unary() throws QueryException {
    boolean minus = false;
    boolean found = false;
    do {
      skipWS();
      if(consume('-')) {
        // an odd number of minus signs negates the operand
        minus ^= true;
        found = true;
      } else if(consume('+')) {
        found = true;
      } else {
        final Expr e = value();
        // only wrap the operand if at least one sign was consumed
        return found ? new Unary(info(), check(e, EVALUNARY), minus) : e;
      }
    } while(true);
  }
/**
* Parses the "ValueExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr value() throws QueryException {
validate();
final Expr e = path();
return e != null ? e : extension();
}
  /**
   * Parses the "ValidateExpr" rule. Validation is not supported: if the rule
   * matches, the expression is parsed completely and an error is raised.
   * @throws QueryException query exception
   */
  private void validate() throws QueryException {
    if(wsConsumeWs(VALIDATE)) {
      if(!wsConsumeWs(STRICT) && !wsConsumeWs(LAX) && wsConsumeWs(TYPE)) {
        final QNm qnm = eQName(QNAMEINV, SKIPCHECK);
        names.add(new QNmCheck(qnm));
        // type validation requires schema support, which is unavailable
        error(NOSCHEMA, qnm);
      }
      wsCheck(BRACE1);
      check(single(), NOVALIDATE);
      wsCheck(BRACE2);
      // validation expressions are always rejected
      error(IMPLVAL);
    }
  }
/**
* Parses the "ExtensionExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr extension() throws QueryException {
final Pragma[] pragmas = pragma();
return pragmas.length == 0 ? null : new Extension(info(), pragmas,
enclosed(NOPRAGMA));
}
/**
* Parses the "Pragma" rule.
* @return array of pragmas
* @throws QueryException query exception
*/
private Pragma[] pragma() throws QueryException {
final ArrayList<Pragma> el = new ArrayList<Pragma>();
while(wsConsumeWs(PRAGMA)) {
final QNm name = eQName(QNAMEINV, URICHECK);
char c = curr();
if(c != '
tok.reset();
while(c != '#' || next() != ')') {
if(c == 0) error(PRAGMAINV);
tok.add(consume());
c = curr();
}
el.add(new Pragma(name, tok.trim().finish()));
ip += 2;
}
return el.toArray(new Pragma[el.size()]);
}
  /**
   * Parses the "PathExpr" rule: absolute paths (leading "/" or "//"),
   * relative paths, and plain non-step expressions.
   * @return query expression, or {@code null} if no path was found
   * @throws QueryException query exception
   */
  private Expr path() throws QueryException {
    checkInit();
    final ExprList el;
    Expr root = null;
    if(consume('/')) {
      root = new Root(info());
      el = new ExprList();
      final Expr ex;
      if(consume('/')) {
        // two slashes: absolute descendant path
        checkAxis(Axis.DESC);
        add(el, descOrSelf());
        mark();
        ex = step();
        if(ex == null) {
          // two slashes, but no following step: error
          if(more()) checkInit();
          error(PATHMISS, found());
        }
      } else {
        // one slash: absolute child path
        checkAxis(Axis.CHILD);
        mark();
        ex = step();
        // no more steps: return root expression
        if(ex == null) return root;
      }
      add(el, ex);
      relativePath(el);
    } else {
      // relative path (no preceding slash)
      mark();
      final Expr ex = step();
      if(ex == null) return null;
      // return non-step expression if no path or map operator follows
      // ("!=" must not be mistaken for the "!" map operator)
      final boolean nostep = curr() != '/' && (curr() != '!' || next() == '=');
      if(nostep && !(ex instanceof AxisStep)) return ex;
      el = new ExprList();
      if(ex instanceof AxisStep) add(el, ex);
      else root = ex;
      relativePath(el);
    }
    return Path.get(info(), root, el.finish());
  }
  /**
   * Parses the "RelativePathExpr" rule, appending all further steps
   * (separated by "/", "//" or the "!" map operator) to the given list.
   * @param el expression list
   * @throws QueryException query exception
   */
  void relativePath(final ExprList el) throws QueryException {
    while(true) {
      // b is set if the step is preceded by the "!" map operator
      boolean b = false;
      if(consume('/')) {
        if(consume('/')) {
          // "//" abbreviates a descendant-or-self::node() step
          add(el, descOrSelf());
          checkAxis(Axis.DESC);
        } else {
          checkAxis(Axis.CHILD);
        }
      } else if(next() != '=' && consume('!')) {
        // "!" map operator; "!=" must not be consumed here
        b = true;
      } else {
        return;
      }
      mark();
      Expr st = step();
      if(st == null) error(PATHMISS, found());
      if(b) st = new Bang(info(), st);
      add(el, st);
    }
  }
  /**
   * Returns a standard descendant-or-self::node() step, as inserted for
   * the "//" path abbreviation.
   * @return step
   */
  private AxisStep descOrSelf() {
    return AxisStep.get(info(), Axis.DESCORSELF, Test.NOD);
  }
// methods for query suggestions
  /**
   * Performs an optional check init. This no-op hook is overridden by
   * subclasses that provide query suggestions.
   */
  protected void checkInit() { }
  /**
   * Performs an optional axis check. This no-op hook is overridden by
   * subclasses that provide query suggestions.
   * @param axis axis
   */
  @SuppressWarnings("unused")
  protected void checkAxis(final Axis axis) { }
  /**
   * Performs an optional test check. This no-op hook is overridden by
   * subclasses that provide query suggestions.
   * @param test node test
   * @param attr attribute flag
   */
  @SuppressWarnings("unused")
  protected void checkTest(final Test test, final boolean attr) { }
  /**
   * Checks a predicate. This no-op hook is overridden by subclasses that
   * provide query suggestions.
   * @param open open flag
   */
  @SuppressWarnings("unused")
  protected void checkPred(final boolean open) { }
/**
* Parses the "StepExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr step() throws QueryException {
final Expr e = postfix();
return e != null ? e : axisStep();
}
  /**
   * Parses the "AxisStep" rule: "..", "@name", an explicit "axis::" step,
   * or an abbreviated child step, followed by any number of predicates.
   * @return axis step, or {@code null} if no step was found
   * @throws QueryException query exception
   */
  private AxisStep axisStep() throws QueryException {
    Axis ax = null;
    Test test = null;
    if(wsConsume(DOT2)) {
      // ".." abbreviates parent::node()
      ax = Axis.PARENT;
      test = Test.NOD;
      checkTest(test, false);
    } else if(consume('@')) {
      // "@" abbreviates the attribute axis
      ax = Axis.ATTR;
      test = nodeTest(true, true);
      checkTest(test, true);
      if(test == null) {
        // "@" must be followed by an attribute name test
        --ip;
        error(NOATTNAME);
      }
    } else {
      // try all explicit "axis::" prefixes
      for(final Axis a : Axis.values()) {
        final int i = ip;
        if(!wsConsumeWs(a.name)) continue;
        alter = NOLOCSTEP;
        if(wsConsumeWs(COLS)) {
          ap = ip;
          ax = a;
          test = nodeTest(a == Axis.ATTR, true);
          checkTest(test, a == Axis.ATTR);
          break;
        }
        // axis name without "::": restore the parser position
        ip = i;
      }
    }
    if(ax == null) {
      // abbreviated step: child axis, or attribute axis for attribute() tests
      ax = Axis.CHILD;
      test = nodeTest(false, true);
      if(test != null && test.type == NodeType.ATT) ax = Axis.ATTR;
      checkTest(test, ax == Axis.ATTR);
    }
    if(test == null) return null;
    // parse any number of "[...]" predicates
    final ExprList el = new ExprList();
    while(wsConsume(BR1)) {
      checkPred(true);
      add(el, expr());
      wsCheck(BR2);
      checkPred(false);
    }
    return AxisStep.get(info(), ax, test, el.finish());
  }
  /**
   * Parses the "NodeTest" rule.
   * Parses the "NameTest" rule.
   * Parses the "KindTest" rule.
   * Restores the parser position and returns {@code null} if no test is found.
   * @param att attribute flag
   * @param all check all tests, or only names
   * @return query expression, or {@code null}
   * @throws QueryException query exception
   */
  private Test nodeTest(final boolean att, final boolean all) throws QueryException {
    final int i = ip;
    if(consume('*')) {
      // name test: *
      if(!consume(':')) return new NameTest(att);
      // name test: *:name
      return new NameTest(new QNm(ncName(QNAMEINV)), NameTest.Mode.NAME, att);
    }
    if(ctx.xquery3 && consume(EQNAME)) {
      // name test: {...}* (XQuery 3.0 braced URI literal)
      final byte[] uri = bracedURILiteral();
      if(consume('*')) {
        final QNm nm = new QNm(COLON, uri);
        return new NameTest(nm, NameTest.Mode.NS, att);
      }
    }
    final QNm name = eQName(null, SKIPCHECK);
    if(name != null) {
      final int i2 = ip;
      if(all && wsConsumeWs(PAR1)) {
        // a parenthesis follows a known node type: parse a kind test
        final NodeType type = NodeType.find(name);
        if(type != null) return kindTest(type);
      } else {
        ip = i2;
        // name test: prefix:name, name
        if(name.hasPrefix() || !consume(':')) {
          skipWS();
          // register the name for deferred namespace resolution
          names.add(new QNmCheck(name, !att));
          return new NameTest(name, NameTest.Mode.STD, att);
        }
        // name test: prefix:*
        if(consume('*')) {
          final QNm nm = new QNm(concat(name.string(), COLON));
          names.add(new QNmCheck(nm, !att));
          return new NameTest(nm, NameTest.Mode.NS, att);
        }
      }
    }
    // no test found: restore the parser position
    ip = i;
    return null;
  }
  /**
   * Parses the "FilterExpr" rule.
   * Parses the "Predicate" rule.
   * Repeatedly applies predicates and dynamic function calls to a primary
   * expression until neither follows.
   * @return postfix expression, or {@code null}
   * @throws QueryException query exception
   */
  private Expr postfix() throws QueryException {
    Expr e = primary(), old;
    do {
      old = e;
      if(wsConsume(BR1)) {
        // "[" without a preceding primary expression is an error
        if(e == null) error(PREDMISSING);
        final ExprList el = new ExprList();
        do {
          add(el, expr());
          wsCheck(BR2);
        } while(wsConsume(BR1));
        e = new Filter(info(), e, el.finish());
      } else if(e != null) {
        // dynamic function invocation, possibly with "?" placeholders
        final Expr[] args = argumentList(e);
        if(args == null) break;
        final Var[] part = new Var[args.length];
        final boolean pt = partial(args, part);
        e = new DynamicFunc(info(), e, args);
        // placeholders turn the call into a partial function application
        if(pt) e = new PartFunc(info(), e, part);
      }
    } while(e != old);
    return e;
  }
/**
* Fills gaps from place-holders with variable references.
* @param args argument array
* @param vars variables array
* @return variables bound
*/
private boolean partial(final Expr[] args, final Var[] vars) {
final InputInfo ii = info();
boolean found = false;
for(int i = 0; i < args.length; i++) {
if(args[i] == null) {
vars[i] = ctx.uniqueVar(ii, null);
args[i] = new VarRef(ii, vars[i]);
found = true;
}
}
return found;
}
  /**
   * Parses the "PrimaryExpr" rule.
   * Parses the "VarRef" rule.
   * Parses the "ContextItem" rule.
   * Parses the "Literal" rule.
   * Tries each alternative in order, returning {@code null} if none matches.
   * @return query expression, or {@code null}
   * @throws QueryException query exception
   */
  private Expr primary() throws QueryException {
    skipWS();
    final char c = curr();
    // variables
    if(c == '$') return new VarRef(info(), checkVar(varName(), VARUNDEF));
    // parentheses ("(#" starts a pragma instead)
    if(c == '(' && next() != '#') return parenthesized();
    // direct constructor
    if(c == '<') return constructor();
    // function item (XQuery 3.0 only)
    if(ctx.xquery3) {
      final Expr e = functionItem();
      if(e != null) return e;
    }
    // function call
    Expr e = functionCall();
    if(e != null) return e;
    // computed constructors
    e = compConstructor();
    if(e != null) return e;
    // ordered expression
    if(wsConsumeWs(ORDERED, BRACE1, INCOMPLETE) ||
       wsConsumeWs(UNORDERED, BRACE1, INCOMPLETE)) return enclosed(NOENCLEXPR);
    // map literal
    if(wsConsumeWs(MAPSTR, BRACE1, INCOMPLETE)) return mapLiteral();
    // context item ("." but not a decimal literal or "..")
    if(c == '.' && !digit(next())) {
      if(next() == '.') return null;
      consume('.');
      return new Context(info());
    }
    // literals
    return literal();
  }
  /**
   * Parses a literal map ("map { key := value, ... }") after the leading
   * keyword has been consumed. Keys and values are stored as alternating
   * entries of the expression list.
   * @return map literal
   * @throws QueryException query exception
   */
  private Expr mapLiteral() throws QueryException {
    wsCheck(BRACE1);
    final ExprList el = new ExprList();
    // an empty map is written as "map { }"
    if(!wsConsume(BRACE2)) {
      do {
        add(el, check(single(), INVMAPKEY));
        wsCheck(ASSIGN);
        add(el, check(single(), INVMAPVAL));
      } while(wsConsume(COMMA));
      wsCheck(BRACE2);
    }
    return new LitMap(info(), el.finish());
  }
/**
* Parses the "FunctionItemExpr" rule.
* Parses the "NamedFunctionRef" rule.
* Parses the "LiteralFunctionItem" rule.
* Parses the "InlineFunction" rule.
* @return query expression, or {@code null}
* @throws QueryException query exception
*/
private Expr functionItem() throws QueryException {
skipWS();
final int pos = ip;
// parse annotations
final Ann ann = ctx.xquery3 && curr('%') ? annotations() : null;
// inline function
if(wsConsume(FUNCTION) && wsConsume(PAR1)) {
final int s = ctx.vars.size();
final Var[] args = paramList();
wsCheck(PAR2);
final SeqType type = optAsType();
final Expr body = enclosed(NOFUNBODY);
ctx.vars.size(s);
return new InlineFunc(info(), type, args, body, ann);
}
// annotations not allowed here
if(ann != null) error(NOANN);
// named function reference
ip = pos;
final QNm name = eQName(null, ctx.sc.nsFunc);
if(name != null && consume('
final long card = ((Int) numericLiteral(true)).itr(null);
if(card < 0 || card > Integer.MAX_VALUE) error(FUNCUNKNOWN, name);
return Functions.get(name, card, false, ctx, info());
}
ip = pos;
return null;
}
  /**
   * Parses the "Literal" rule (numeric and string literals).
   * @return query expression, or {@code null} if no literal was found
   * @throws QueryException query exception
   */
  private Item literal() throws QueryException {
    final char c = curr();
    // numeric literals
    if(digit(c) || c == '.') return numericLiteral(false);
    // strings
    if(!quote(c)) return null;
    final int i = ip;
    final byte[] s = stringLiteral();
    final int p2 = ip;
    if(consume(':')) {
      // a single ':' may start an EQName ("uri":local): give up on the literal
      if(!consume('=')) {
        ip = i;
        return null;
      }
      // ':=' follows (e.g. map entry): restore position right after the string
      ip = p2;
    }
    return Str.get(s);
  }
  /**
   * Parses the "NumericLiteral" rule.
   * Parses the "DecimalLiteral" rule.
   * Parses the "IntegerLiteral" rule.
   * @param itr integer flag: if {@code true}, only integers are accepted
   * @return query expression
   * @throws QueryException query exception
   */
  private Item numericLiteral(final boolean itr) throws QueryException {
    tok.reset();
    while(digit(curr())) tok.add(consume());
    final boolean dec = consume('.');
    if(dec) {
      // decimal literal; rejected if only integers are allowed
      if(itr) error(NUMBERITR);
      tok.add('.');
      while(digit(curr()))
        tok.add(consume());
    }
    // a following name character indicates a double ("1e2") or an error
    if(XMLToken.isNCStartChar(curr())) return checkDbl();
    if(dec) return new Dec(tok.finish());
    // Long.MIN_VALUE signals an out-of-range conversion
    final long l = toLong(tok.finish());
    if(l == Long.MIN_VALUE) error(RANGE, tok);
    return Int.get(l);
  }
  /**
   * Parses the "DoubleLiteral" rule. Checks if a number is followed by a
   * whitespace: the mantissa has already been collected in {@code tok}, and
   * the current character must start a valid exponent ("e"/"E").
   * @return expression
   * @throws QueryException query exception
   */
  private Dbl checkDbl() throws QueryException {
    if(!consume('e') && !consume('E')) error(NUMBERWS);
    tok.add('e');
    if(curr('+') || curr('-')) tok.add(consume());
    final int s = tok.size();
    while(digit(curr()))
      tok.add(consume());
    // at least one exponent digit is required
    if(s == tok.size()) error(NUMBERINC, tok);
    // the literal must not be directly followed by a name character
    if(XMLToken.isNCStartChar(curr())) error(NUMBERWS);
    return Dbl.get(tok.finish(), info());
  }
  /**
   * Parses the "StringLiteral" rule. Entities are resolved, and a doubled
   * delimiter ("" or '') is treated as an escaped quote character.
   * @return string contents
   * @throws QueryException query exception
   */
  private byte[] stringLiteral() throws QueryException {
    skipWS();
    final char del = curr();
    if(!quote(del)) error(NOQUOTE, found());
    consume();
    tok.reset();
    while(true) {
      while(!consume(del)) {
        if(!more()) error(NOQUOTE, found());
        entity(tok);
      }
      // a second delimiter in a row is an escaped quote; otherwise we are done
      if(!consume(del)) break;
      tok.add(del);
    }
    return tok.finish();
  }
/**
* Parses the "BracedURILiteral" rule without the "Q{" prefix.
* @return query expression
* @throws QueryException query exception
*/
private byte[] bracedURILiteral() throws QueryException {
tok.reset();
while(!consume('}')) {
if(!more()) error(WRONGCHAR, BRACE2, found());
entity(tok);
}
return tok.finish();
}
/**
* Parses the "VarName" rule.
* @return query expression
* @throws QueryException query exception
*/
private QNm varName() throws QueryException {
wsCheck(DOLLAR);
skipWS();
return eQName(NOVARNAME, null);
}
/**
* Parses the "ParenthesizedExpr" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr parenthesized() throws QueryException {
wsCheck(PAR1);
final Expr e = expr();
wsCheck(PAR2);
return e == null ? Empty.SEQ : e;
}
  /**
   * Parses the "FunctionCall" rule.
   * @return query expression, or {@code null} if no function call was found
   * @throws QueryException query exception
   */
  private Expr functionCall() throws QueryException {
    final int i = ip;
    final QNm name = eQName(null, ctx.sc.nsFunc);
    if(name != null && !keyword(name)) {
      final Expr[] args = argumentList(name.string());
      if(args != null) {
        // remember "unknown function" as alternative error message
        alter = FUNCUNKNOWN;
        alterFunc = name;
        ap = ip;
        // '?' place-holders (null entries) turn the call into a partial
        // function application
        final Var[] vars = new Var[args.length];
        final boolean part = partial(args, vars);
        final TypedFunc f = Functions.get(name, args, false, ctx, info());
        if(f != null) {
          alter = null;
          return part ? new PartFunc(info(), f, vars) : f.fun;
        }
      }
    }
    // no function found: restore position
    ip = i;
    return null;
  }
/**
* Parses the "ArgumentList" rule.
* @param name name of the function (item)
* @return array of arguments, place-holders '?' are represented as
* {@code null} entries
* @throws QueryException query exception
*/
private Expr[] argumentList(final Object name) throws QueryException {
if(!wsConsume(PAR1)) return null;
Expr[] args = { };
if(!wsConsume(PAR2)) {
do {
Expr arg = null;
if(!wsConsume(PLHOLDER) && (arg = single()) == null)
error(FUNCMISS, name);
// speeding up array creation
final int a = args.length;
final Expr[] tmp = new Expr[a + 1];
System.arraycopy(args, 0, tmp, 0, a);
tmp[a] = arg;
args = tmp;
} while(wsConsume(COMMA));
if(!wsConsume(PAR2)) error(FUNCMISS, name);
}
return args;
}
/**
* Parses the "Constructor" rule.
* Parses the "DirectConstructor" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr constructor() throws QueryException {
check('<');
return consume('!') ? dirComment() : consume('?') ? dirPI() : dirElement();
}
  /**
   * Parses the "DirElemConstructor" rule.
   * Parses the "DirAttributeList" rules.
   * Parses the opening tag, the attribute list (including in-scope namespace
   * declarations), the element content and the matching closing tag. The
   * namespace context modified while parsing is restored before returning.
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr dirElement() throws QueryException {
    // cache namespace information
    final int s = ctx.sc.ns.size();
    final byte[] nse = ctx.sc.nsElem;
    final int npos = names.size();
    final QNm tag = new QNm(qName(TAGNAME));
    names.add(new QNmCheck(tag));
    consumeWS();
    final Atts ns = new Atts();
    final ExprList cont = new ExprList();
    // parse attributes...
    boolean xmlDecl = false; // xml prefix explicitly declared?
    while(true) {
      final byte[] atn = qName(null);
      if(atn.length == 0) break;
      final ExprList attv = new ExprList();
      consumeWS();
      check('=');
      consumeWS();
      final char delim = consume();
      if(!quote(delim)) error(NOQUOTE, found());
      final TokenBuilder tb = new TokenBuilder();
      boolean simple = true;
      do {
        while(!consume(delim)) {
          final char ch = curr();
          if(ch == '{') {
            // '{{' is an escaped literal brace; a single '{' starts an
            // enclosed expression
            if(next() == '{') {
              tb.add(consume());
              consume();
            } else {
              final byte[] text = tb.finish();
              if(text.length != 0) {
                add(attv, Str.get(text));
              } else {
                add(attv, enclosed(NOENCLEXPR));
                simple = false;
              }
              tb.reset();
            }
          } else if(ch == '}') {
            // '}' must be doubled inside attribute values
            consume();
            check('}');
            tb.add('}');
          } else if(ch == '<' || ch == 0) {
            error(NOQUOTE, found());
          } else if(ch == '\n' || ch == '\t') {
            // attribute-value whitespace normalization
            tb.add(' ');
            consume();
          } else if(ch == '\r') {
            if(next() != '\n') tb.add(' ');
            consume();
          } else {
            entity(tb);
          }
        }
        // a doubled delimiter is an escaped quote; otherwise the value ends
        if(!consume(delim)) break;
        tb.add(delim);
      } while(true);
      if(!tb.isEmpty()) add(attv, Str.get(tb.finish()));
      // parse namespace declarations
      final boolean pr = startsWith(atn, XMLNSC);
      if(pr || eq(atn, XMLNS)) {
        // namespace URIs must be literal strings
        if(!simple) error(NSCONS);
        final byte[] pref = pr ? local(atn) : EMPTY;
        final byte[] uri = attv.isEmpty() ? EMPTY :
          ((Str) attv.get(0)).string();
        if(eq(pref, XML) && eq(uri, XMLURI)) {
          if(xmlDecl) error(DUPLNSDEF, XML);
          xmlDecl = true;
        } else {
          if(pr) {
            // reserved prefixes/URIs must not be rebound
            if(uri.length == 0) error(NSEMPTYURI);
            if(eq(pref, XML, XMLNS)) error(BINDXML, pref);
            if(eq(uri, XMLURI)) error(BINDXMLURI, uri, XML);
            if(eq(uri, XMLNSURI)) error(BINDXMLURI, uri, XMLNS);
            ctx.sc.ns.add(pref, uri);
          } else {
            // default element namespace
            ctx.sc.nsElem = uri;
          }
          if(ns.get(pref) != -1) error(DUPLNSDEF, pref);
          ns.add(pref, uri);
        }
      } else {
        // ordinary attribute
        final QNm attn = new QNm(atn);
        names.add(new QNmCheck(attn, false));
        add(cont, new CAttr(info(), false, attn, attv.finish()));
      }
      if(!consumeWS()) break;
    }
    if(consume('/')) {
      // empty element: "<name/>"
      check('>');
    } else {
      check('>');
      // parse content until the closing tag "</"
      while(curr() != '<' || next() != '/') {
        final Expr e = dirElemContent(tag.string());
        if(e == null) continue;
        add(cont, e);
      }
      ip += 2;
      final byte[] close = qName(TAGNAME);
      consumeWS();
      check('>');
      // closing tag must match the opening tag
      if(!eq(tag.string(), close)) error(TAGWRONG, tag.string(), close);
    }
    assignURI(npos);
    // restore the cached namespace context
    ctx.sc.ns.size(s);
    ctx.sc.nsElem = nse;
    return new CElem(info(), tag, ns, cont.finish());
  }
  /**
   * Parses the "DirElemContent" rule: returns the next content item of a
   * direct element constructor (text, CDATA, nested constructor or enclosed
   * expression). A pending text node is returned before a nested construct
   * is parsed; {@code null} signals the start of the closing tag.
   * @param tag opening tag (used for the "missing closing tag" error)
   * @return query expression, or {@code null} at the closing tag
   * @throws QueryException query exception
   */
  private Expr dirElemContent(final byte[] tag) throws QueryException {
    final TokenBuilder tb = new TokenBuilder();
    boolean strip = true;
    do {
      final char c = curr();
      if(c == '<') {
        if(wsConsume(CDATA)) {
          // CDATA is never stripped as whitespace
          tb.add(cDataSection());
          strip = false;
        } else {
          // flush pending text; otherwise closing tag or nested constructor
          final Str txt = text(tb, strip);
          return txt != null ? txt : next() == '/' ? null : constructor();
        }
      } else if(c == '{') {
        // '{{' is an escaped brace; a single '{' starts an enclosed expression
        if(next() == '{') {
          tb.add(consume());
          consume();
        } else {
          final Str txt = text(tb, strip);
          return txt != null ? txt : enclosed(NOENCLEXPR);
        }
      } else if(c == '}') {
        // '}' must be doubled in element content
        consume();
        check('}');
        tb.add('}');
      } else if(c != 0) {
        // entity() reports whether a character reference was parsed
        strip &= !entity(tb);
      } else {
        error(NOCLOSING, tag);
      }
    } while(true);
  }
/**
* Returns a string item.
* @param tb token builder
* @param strip strip flag
* @return text or {@code null}
*/
private Str text(final TokenBuilder tb, final boolean strip) {
final byte[] t = tb.finish();
return t.length == 0 || strip && !ctx.sc.spaces && ws(t) ?
null : Str.get(t);
}
  /**
   * Parses the "DirCommentConstructor" rule ("&lt;!--...--&gt;"); the leading
   * "&lt;!" has already been consumed by the caller.
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr dirComment() throws QueryException {
    check('-');
    check('-');
    final TokenBuilder tb = new TokenBuilder();
    do {
      while(not('-'))
        tb.add(consume());
      consume();
      // "--" must be followed by '>' to close the comment
      if(consume('-')) {
        check('>');
        return new CComm(info(), Str.get(tb.finish()));
      }
      // a single dash is part of the comment contents
      tb.add('-');
    } while(true);
  }
  /**
   * Parses the "DirPIConstructor" rule ("&lt;?name ...?&gt;"); the leading
   * "&lt;?" has already been consumed by the caller.
   * Parses the "DirPIContents" rule.
   * @return query expression
   * @throws QueryException query exception
   */
  private Expr dirPI() throws QueryException {
    final byte[] str = ncName(INVALPI);
    // the (case-insensitive) target "xml" is reserved
    if(eq(lc(str), XML)) error(PIXML, str);
    // whitespace must separate target and contents
    final boolean space = skipWS();
    final TokenBuilder tb = new TokenBuilder();
    do {
      while(not('?')) {
        if(!space) error(PIWRONG);
        tb.add(consume());
      }
      consume();
      // "?>" closes the processing instruction
      if(consume('>')) {
        return new CPI(info(), Str.get(str), Str.get(tb.finish()));
      }
      // a single '?' is part of the contents
      tb.add('?');
    } while(true);
  }
  /**
   * Parses the "CDataSection" rule; the leading "&lt;![CDATA[" has already
   * been consumed. Carriage returns are normalized to newlines.
   * @return CData contents
   * @throws QueryException query exception
   */
  private byte[] cDataSection() throws QueryException {
    final TokenBuilder tb = new TokenBuilder();
    while(true) {
      while(not(']')) {
        char ch = consume();
        // normalize "\r\n" and "\r" to "\n"
        if(ch == '\r') {
          ch = '\n';
          if(curr(ch)) consume();
        }
        tb.add(ch);
      }
      consume();
      // "]]>" terminates the section
      if(curr(']') && next() == '>') {
        ip += 2;
        return tb.finish();
      }
      // a single ']' is part of the contents
      tb.add(']');
    }
  }
  /**
   * Parses the "ComputedConstructor" rule: tries each constructor keyword in
   * turn; on failure, {@link #consume(Expr, int)} restores the position.
   * @return query expression, or {@code null} if no constructor was found
   * @throws QueryException query exception
   */
  private Expr compConstructor() throws QueryException {
    final int i = ip;
    if(wsConsumeWs(DOCUMENT)) return consume(compDoc(), i);
    if(wsConsumeWs(ELEMENT)) return consume(compElement(), i);
    if(wsConsumeWs(ATTRIBUTE)) return consume(compAttribute(), i);
    if(wsConsumeWs(NSPACE)) return consume(compNamespace(), i);
    if(wsConsumeWs(TEXT)) return consume(compText(), i);
    if(wsConsumeWs(COMMENT)) return consume(compComment(), i);
    if(wsConsumeWs(PI)) return consume(compPI(), i);
    return null;
  }
/**
* Consumes the specified expression or resets the query position.
* @param expr expression
* @param p query position
* @return expression or {@code null}
*/
private Expr consume(final Expr expr, final int p) {
if(expr == null) ip = p;
return expr;
}
/**
* Parses the "CompDocConstructor" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr compDoc() throws QueryException {
if(!wsConsume(BRACE1)) return null;
final Expr e = check(expr(), NODOCCONS);
wsCheck(BRACE2);
return new CDoc(info(), e);
}
  /**
   * Parses the "CompElemConstructor" rule ("element name { ... }" or
   * "element { name-expr } { ... }") after the leading keyword.
   * Parses the "ContextExpr" rule.
   * @return query expression, or {@code null} if no constructor was found
   * @throws QueryException query exception
   */
  private Expr compElement() throws QueryException {
    skipWS();
    Expr name;
    // static name, or dynamic name expression in braces
    final QNm qn = eQName(null, SKIPCHECK);
    if(qn != null) {
      name = qn;
      names.add(new QNmCheck(qn));
    } else {
      if(!wsConsume(BRACE1)) return null;
      name = check(expr(), NOTAG);
      wsCheck(BRACE2);
    }
    // content expression (may be empty)
    if(!wsConsume(BRACE1)) return null;
    final Expr e = expr();
    wsCheck(BRACE2);
    return new CElem(info(), name, null,
      e == null ? new Expr[0] : new Expr[] { e });
  }
  /**
   * Parses the "CompAttrConstructor" rule ("attribute name { ... }" or
   * "attribute { name-expr } { ... }") after the leading keyword.
   * @return query expression, or {@code null} if no constructor was found
   * @throws QueryException query exception
   */
  private Expr compAttribute() throws QueryException {
    skipWS();
    Expr name;
    final QNm qn = eQName(null, SKIPCHECK);
    if(qn != null) {
      name = qn;
      names.add(new QNmCheck(qn, false));
    } else {
      if(!wsConsume(BRACE1)) return null;
      // NOTE(review): unlike compElement, the name expression is not wrapped
      // in check(..) and may be null here — verify downstream handles this
      name = expr();
      wsCheck(BRACE2);
    }
    if(!wsConsume(BRACE1)) return null;
    final Expr e = expr();
    wsCheck(BRACE2);
    return new CAttr(info(), true, name, e == null ? Empty.SEQ : e);
  }
  /**
   * Parses the "CompNamespaceConstructor" rule ("namespace name { ... }" or
   * "namespace { name-expr } { ... }"); only available in XQuery 3.0.
   * @return query expression, or {@code null} if not applicable
   * @throws QueryException query exception
   */
  private Expr compNamespace() throws QueryException {
    if(!ctx.xquery3) return null;
    skipWS();
    Expr name;
    // static prefix, or dynamic prefix expression in braces
    final byte[] str = ncName(null);
    if(str.length != 0) {
      name = Str.get(str);
    } else {
      if(!wsConsume(BRACE1)) return null;
      name = check(expr(), NSWRONG);
      wsCheck(BRACE2);
    }
    if(!wsConsume(BRACE1)) return null;
    final Expr e = expr();
    wsCheck(BRACE2);
    return new CNSpace(info(), name, e == null ? Empty.SEQ : e);
  }
/**
* Parses the "CompTextConstructor" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr compText() throws QueryException {
if(!wsConsume(BRACE1)) return null;
final Expr e = check(expr(), NOTXTCONS);
wsCheck(BRACE2);
return new CTxt(info(), e);
}
/**
* Parses the "CompCommentConstructor" rule.
* @return query expression
* @throws QueryException query exception
*/
private Expr compComment() throws QueryException {
if(!wsConsume(BRACE1)) return null;
final Expr e = check(expr(), NOCOMCONS);
wsCheck(BRACE2);
return new CComm(info(), e);
}
  /**
   * Parses the "CompPIConstructor" rule ("processing-instruction name
   * { ... }" or "processing-instruction { name-expr } { ... }").
   * @return query expression, or {@code null} if no constructor was found
   * @throws QueryException query exception
   */
  private Expr compPI() throws QueryException {
    skipWS();
    Expr name;
    // static target, or dynamic target expression in braces
    final byte[] str = ncName(null);
    if(str.length != 0) {
      name = Str.get(str);
    } else {
      if(!wsConsume(BRACE1)) return null;
      name = check(expr(), PIWRONG);
      wsCheck(BRACE2);
    }
    if(!wsConsume(BRACE1)) return null;
    final Expr e = expr();
    wsCheck(BRACE2);
    return new CPI(info(), name, e == null ? Empty.SEQ : e);
  }
/**
* Parses the "SimpleType" rule.
* @return sequence type
* @throws QueryException query exception
*/
private SeqType simpleType() throws QueryException {
skipWS();
final QNm name = eQName(TYPEINVALID, ctx.sc.nsElem);
final Type t = AtomType.find(name, false);
if(t == null) error(TYPEUNKNOWN, name);
if(t == AtomType.AAT || t == AtomType.NOT) error(CASTUNKNOWN, name);
skipWS();
return SeqType.get(t, consume('?') ? Occ.ZERO_ONE : Occ.ONE);
}
  /**
   * Parses the "SequenceType" rule.
   * Parses the "OccurrenceIndicator" rule.
   * Parses the "KindTest" rule.
   * @return sequence type
   * @throws QueryException query exception
   */
  private SeqType sequenceType() throws QueryException {
    // empty sequence: "empty-sequence()"
    if(wsConsumeWs(string(AtomType.EMP.string()), PAR1, null)) {
      wsCheck(PAR1);
      wsCheck(PAR2);
      return SeqType.get(AtomType.EMP, Occ.ONE, null);
    }
    // parse item type and occurrence indicator ('?', '+', '*', or none)
    final TypeWrapper tw = itemType();
    skipWS();
    final Occ occ = consume('?') ? Occ.ZERO_ONE : consume('+') ? Occ.ONE_MORE :
      consume('*') ? Occ.ZERO_MORE : Occ.ONE;
    skipWS();
    return SeqType.get(tw.type, occ, tw.test);
  }
  /**
   * Parses the "ItemType" rule.
   * Parses the "ParenthesizedItemType" rule.
   * Distinguishes parenthesized types, function-style tests ("item()",
   * node kind tests, function tests) and plain atomic type names.
   * @return item type
   * @throws QueryException query exception
   */
  private TypeWrapper itemType() throws QueryException {
    skipWS();
    // parenthesized item type
    if(consume(PAR1)) {
      final TypeWrapper ret = itemType();
      wsCheck(PAR2);
      return ret;
    }
    // parse optional annotation and type name
    final Ann ann = ctx.xquery3 && curr('%') ? annotations() : null;
    final QNm name = eQName(TYPEINVALID, null);
    skipWS();
    // check if name is followed by parentheses
    final boolean func = curr('(');
    // item type
    Type t = null;
    if(func) {
      consume(PAR1);
      // item type
      if(name.eq(AtomType.ITEM.name)) t = AtomType.ITEM;
      // node types
      if(t == null) t = NodeType.find(name);
      // function types
      if(t == null) {
        t = FuncType.find(name);
        // [LW] XQuery, function test: add annotation support
        if(t != null) return new TypeWrapper(functionTest(t));
      }
      // no type found
      if(t == null) error(NOTYPE, new TokenBuilder(name.string()));
    } else {
      // attach default element namespace
      if(!name.hasURI()) name.uri(ctx.sc.nsElem);
      // atomic types
      t = AtomType.find(name, false);
      // no type found
      if(t == null) error(TYPEUNKNOWN, name);
    }
    // annotations are not allowed for remaining types
    if(ann != null) error(NOANN);
    // atomic value, or closing parenthesis
    if(!func || wsConsume(PAR2)) return new TypeWrapper(t);
    // raise error if type different to node is not finalized by a parenthesis
    if(!(t instanceof NodeType)) wsCheck(PAR2);
    // return type with an optional kind test for node types
    return new TypeWrapper(t, kindTest((NodeType) t));
  }
  /**
   * Parses the "FunctionTest" rule: either a wildcard ("function(*)"), a
   * map test ("map(key-type, value-type)") or a typed function test with an
   * argument list and return type.
   * @param t function type parsed so far
   * @return resulting type
   * @throws QueryException query exception
   */
  private Type functionTest(final Type t) throws QueryException {
    // wildcard
    if(wsConsume(ASTERISK)) {
      wsCheck(PAR2);
      return t;
    }
    // map: key type must be atomic
    if(t.isMap()) {
      final Type key = itemType().type;
      if(!key.instanceOf(AtomType.AAT)) error(MAPTAAT, key);
      wsCheck(COMMA);
      final Type tp = MapType.get((AtomType) key, sequenceType());
      wsCheck(PAR2);
      return tp;
    }
    // function type
    SeqType[] args = { };
    if(!wsConsume(PAR2)) {
      // function has got arguments
      do {
        args = Array.add(args, sequenceType());
      } while(wsConsume(COMMA));
      wsCheck(PAR2);
    }
    // mandatory return type
    wsCheck(AS);
    final SeqType st = sequenceType();
    return FuncType.get(st, args);
  }
/**
* Parses the "ElementTest" rule without the type name and the opening bracket.
* @param t type
* @return arguments
* @throws QueryException query exception
*/
private Test kindTest(final NodeType t) throws QueryException {
Test tp = null;
switch(t) {
case DOC: tp = documentTest(); break;
case ELM: tp = elementTest(); break;
case ATT: tp = attributeTest(); break;
case PI: tp = piTest(); break;
case SCE: tp = schemaTest(); break;
case SCA: tp = schemaTest(); break;
default: break;
}
wsCheck(PAR2);
return tp == null ? Test.get(t) : tp;
}
/**
* Parses the "DocumentTest" rule without the leading keyword and its brackets.
* @return arguments
* @throws QueryException query exception
*/
private Test documentTest() throws QueryException {
final boolean elem = consume(ELEMENT);
if(!elem && !consume(SCHEMA_ELEMENT)) return null;
wsCheck(PAR1);
final Test t = elem ? elementTest() : schemaTest();
wsCheck(PAR2);
return new DocTest(t != null ? t : Test.ELM);
}
  /**
   * Parses the "ElementTest" rule without the leading keyword and its
   * brackets: an element name (or '*'), optionally followed by a type name
   * and a '?' nilled indicator.
   * @return element test, or {@code null} if neither name nor '*' follows
   * @throws QueryException query exception
   */
  private Test elementTest() throws QueryException {
    final QNm name = eQName(null, ctx.sc.nsElem);
    if(name == null && !consume(ASTERISK)) return null;
    Type type = null;
    if(wsConsumeWs(COMMA)) {
      // parse type name
      final QNm tn = eQName(QNAMEINV, ctx.sc.nsElem);
      type = AtomType.find(tn, true);
      if(type == null) error(TYPEUNDEF, tn);
      // parse optional question mark
      wsConsume(PLHOLDER);
    }
    return new ExtTest(NodeType.ELM, name, type, ctx.sc.strip);
  }
  /**
   * Parses the "SchemaElementTest"/"SchemaAttributeTest" rule without the
   * leading keyword and its brackets. Schema tests are not supported, so
   * this method always raises an error.
   * @return never returns normally
   * @throws QueryException always thrown (schema tests unsupported)
   */
  private Test schemaTest() throws QueryException {
    final QNm name = eQName(QNAMEINV, ctx.sc.nsElem);
    throw error(SCHEMAINV, name);
  }
  /**
   * Parses the "AttributeTest" rule without the leading keyword and its
   * brackets: an attribute name (or '*'), optionally followed by a type name.
   * @return attribute test, or {@code null} if neither name nor '*' follows
   * @throws QueryException query exception
   */
  private Test attributeTest() throws QueryException {
    final QNm name = eQName(null, null);
    if(name == null && !consume(ASTERISK)) return null;
    Type type = null;
    if(wsConsumeWs(COMMA)) {
      // parse type name
      final QNm tn = eQName(QNAMEINV, ctx.sc.nsElem);
      type = AtomType.find(tn, true);
      if(type == null) error(TYPEUNDEF, tn);
    }
    return new ExtTest(NodeType.ATT, name, type, ctx.sc.strip);
  }
  /**
   * Parses the "PITest" rule without the leading keyword and its brackets:
   * the target may be given as string literal or as NCName.
   * @return processing-instruction test, or {@code null} if no target follows
   * @throws QueryException query exception
   */
  private Test piTest() throws QueryException {
    final byte[] nm;
    tok.reset();
    if(quote(curr())) {
      // string literal: trimmed value must be a valid NCName
      nm = trim(stringLiteral());
      if(!XMLToken.isNCName(nm)) error(INVNCNAME, nm);
    } else if(ncName()) {
      nm = tok.finish();
    } else {
      return null;
    }
    return new ExtTest(NodeType.PI, new QNm(nm));
  }
  /**
   * Parses the "TryCatch" rules (XQuery 3.0 only): a try block followed by
   * one or more catch clauses, each with a '|'-separated list of error codes.
   * @return query expression, or {@code null} if not applicable
   * @throws QueryException query exception
   */
  private Expr tryCatch() throws QueryException {
    if(!ctx.xquery3 || !wsConsumeWs(TRY)) return null;
    final Expr tr = enclosed(NOENCLEXPR);
    wsCheck(CATCH);
    Catch[] ct = { };
    do {
      QNm[] codes = { };
      do {
        skipWS();
        // error codes are written as name tests
        final Test test = nodeTest(false, false);
        if(test == null) error(NOCATCH);
        codes = Array.add(codes, test.name);
      } while(wsConsumeWs(PIPE));
      final Catch c = new Catch(info(), codes, ctx);
      // prepare() registers the implicit error variables; the scope is
      // restored after the handler body has been parsed
      final int s = c.prepare(ctx);
      c.expr = enclosed(NOENCLEXPR);
      ctx.vars.size(s);
      ct = Array.add(ct, c);
    } while(wsConsumeWs(CATCH));
    return new Try(info(), tr, ct);
  }
  /**
   * Parses the "FTSelection" rules: an "FTOr" expression followed by any
   * number of positional filters (ordered, window, distance, content,
   * same/different scope). Loops until no further filter is consumed.
   * @param prg pragma flag
   * @return query expression
   * @throws QueryException query exception
   */
  private FTExpr ftSelection(final boolean prg) throws QueryException {
    FTExpr expr = ftOr(prg);
    FTExpr old;
    FTExpr first = null;
    boolean ordered = false;
    do {
      old = expr;
      if(wsConsumeWs(ORDERED)) {
        // "ordered" is applied at the end; keep looping
        ordered = true;
        old = null;
      } else if(wsConsumeWs(WINDOW)) {
        expr = new FTWindow(info(), expr, additive(), ftUnit());
      } else if(wsConsumeWs(DISTANCE)) {
        final Expr[] rng = ftRange(false);
        if(rng == null) error(FTRANGE);
        expr = new FTDistance(info(), expr, rng, ftUnit());
      } else if(wsConsumeWs(AT)) {
        // "at start" / "at end"
        final boolean start = wsConsumeWs(START);
        final boolean end = !start && wsConsumeWs(END);
        if(!start && !end) error(INCOMPLETE);
        expr = new FTContent(info(), expr, start, end);
      } else if(wsConsumeWs(ENTIRE)) {
        wsCheck(CONTENT);
        expr = new FTContent(info(), expr, false, false);
      } else {
        // "same"/"different" sentence/paragraph scope
        final boolean same = wsConsumeWs(SAME);
        final boolean diff = !same && wsConsumeWs(DIFFERENT);
        if(same || diff) {
          FTUnit unit = null;
          if(wsConsumeWs(SENTENCE)) unit = FTUnit.SENTENCE;
          else if(wsConsumeWs(PARAGRAPH)) unit = FTUnit.PARAGRAPH;
          else error(INCOMPLETE);
          expr = new FTScope(info(), expr, unit, same);
        }
      }
      // remember the first filter wrapper for the "ordered" rewrite below
      if(first == null && old != null && old != expr) first = expr;
    } while(old != expr);
    if(ordered) {
      // attach the order filter innermost (below the first positional filter)
      if(first == null) return new FTOrder(info(), expr);
      first.expr[0] = new FTOrder(info(), first.expr[0]);
    }
    return expr;
  }
/**
* Parses the "FTOr" rule.
* @param prg pragma flag
* @return query expression
* @throws QueryException query exception
*/
private FTExpr ftOr(final boolean prg) throws QueryException {
final FTExpr e = ftAnd(prg);
if(!wsConsumeWs(FTOR)) return e;
FTExpr[] list = { e };
do list = Array.add(list, ftAnd(prg)); while(wsConsumeWs(FTOR));
return new FTOr(info(), list);
}
/**
* Parses the "FTAnd" rule.
* @param prg pragma flag
* @return query expression
* @throws QueryException query exception
*/
private FTExpr ftAnd(final boolean prg) throws QueryException {
final FTExpr e = ftMildNot(prg);
if(!wsConsumeWs(FTAND)) return e;
FTExpr[] list = { e };
do list = Array.add(list, ftMildNot(prg)); while(wsConsumeWs(FTAND));
return new FTAnd(info(), list);
}
  /**
   * Parses the "FTMildNot" rule: "A not in B not in ...".
   * @param prg pragma flag
   * @return query expression
   * @throws QueryException query exception
   */
  private FTExpr ftMildNot(final boolean prg) throws QueryException {
    final FTExpr e = ftUnaryNot(prg);
    if(!wsConsumeWs(NOT)) return e;
    FTExpr[] list = { };
    do {
      // "not" must be followed by "in"
      wsCheck(IN);
      list = Array.add(list, ftUnaryNot(prg));
    } while(wsConsumeWs(NOT));
    // convert "A not in B not in ..." to "A not in(B or ...)"
    return new FTMildNot(info(), e, list.length == 1 ? list[0] : new FTOr(
      info(), list));
  }
/**
* Parses the "FTUnaryNot" rule.
* @param prg pragma flag
* @return query expression
* @throws QueryException query exception
*/
private FTExpr ftUnaryNot(final boolean prg) throws QueryException {
final boolean not = wsConsumeWs(FTNOT);
final FTExpr e = ftPrimaryWithOptions(prg);
return not ? new FTNot(info(), e) : e;
}
  /**
   * Parses the "FTPrimaryWithOptions" rule: a primary expression followed by
   * any number of match options and an optional weight.
   * @param prg pragma flag
   * @return query expression
   * @throws QueryException query exception
   */
  private FTExpr ftPrimaryWithOptions(final boolean prg) throws QueryException {
    FTExpr expr = ftPrimary(prg);
    final FTOpt fto = new FTOpt();
    boolean found = false;
    while(ftMatchOption(fto)) found = true;
    // check if specified language is not available
    if(fto.ln == null) fto.ln = Language.def();
    if(!Tokenizer.supportFor(fto.ln)) error(FTNOTOK, fto.ln);
    // stemming requires support for the chosen language (unless a
    // stemming dictionary was given)
    if(fto.is(ST) && fto.sd == null && !Stemmer.supportFor(fto.ln))
      error(FTNOSTEM, fto.ln);
    // consume weight option
    if(wsConsumeWs(WEIGHT)) expr = new FTWeight(info(), expr,
      enclosed(NOENCLEXPR));
    // skip options if none were specified...
    return found ? new FTOptions(info(), expr, fto) : expr;
  }
  /**
   * Parses the "FTPrimary" rule: an extension selection (pragmas),
   * a parenthesized selection, or a word query with mode and occurrence
   * options.
   * @param prg pragma flag
   * @return query expression
   * @throws QueryException query exception
   */
  private FTExpr ftPrimary(final boolean prg) throws QueryException {
    final Pragma[] pragmas = pragma();
    if(pragmas.length != 0) {
      wsCheck(BRACE1);
      final FTExpr e = ftSelection(true);
      wsCheck(BRACE2);
      return new FTExtensionSelection(info(), pragmas, e);
    }
    if(wsConsumeWs(PAR1)) {
      final FTExpr e = ftSelection(false);
      wsCheck(PAR2);
      return e;
    }
    skipWS();
    // words are given as enclosed expression or string literal
    final Expr e = curr('{') ? enclosed(NOENCLEXPR)
      : quote(curr()) ? Str.get(stringLiteral()) : null;
    if(e == null) error(prg ? NOPRAGMA : NOENCLEXPR);
    // FTAnyAllOption
    FTMode mode = FTMode.M_ANY;
    if(wsConsumeWs(ALL)) {
      mode = wsConsumeWs(WORDS) ? FTMode.M_ALLWORDS : FTMode.M_ALL;
    } else if(wsConsumeWs(ANY)) {
      mode = wsConsumeWs(WORD) ? FTMode.M_ANYWORD : FTMode.M_ANY;
    } else if(wsConsumeWs(PHRASE)) {
      mode = FTMode.M_PHRASE;
    }
    // FTTimes: "occurs ... times"
    Expr[] occ = null;
    if(wsConsumeWs(OCCURS)) {
      occ = ftRange(false);
      if(occ == null) error(FTRANGE);
      wsCheck(TIMES);
    }
    return new FTWords(info(), e, mode, occ);
  }
  /**
   * Parses the "FTRange" rule: "exactly n", "at least n", "at most n" or
   * "from n to m".
   * @param i accept only integers ("FTLiteralRange")
   * @return two-element array with lower and upper bound, or {@code null}
   *         if no range keyword was found
   * @throws QueryException query exception
   */
  private Expr[] ftRange(final boolean i) throws QueryException {
    // defaults: lower bound 1, upper bound unbounded
    final Expr[] occ = { Int.get(1), Int.get(Long.MAX_VALUE)};
    if(wsConsumeWs(EXACTLY)) {
      occ[0] = ftAdditive(i);
      occ[1] = occ[0];
    } else if(wsConsumeWs(AT)) {
      if(wsConsumeWs(LEAST)) {
        occ[0] = ftAdditive(i);
      } else {
        wsCheck(MOST);
        occ[0] = Int.get(0);
        occ[1] = ftAdditive(i);
      }
    } else if(wsConsumeWs(FROM)) {
      occ[0] = ftAdditive(i);
      wsCheck(TO);
      occ[1] = ftAdditive(i);
    } else {
      return null;
    }
    return occ;
  }
/**
* Returns an argument of the "FTRange" rule.
* @param i accept only integers
* @return query expression
* @throws QueryException query exception
*/
private Expr ftAdditive(final boolean i) throws QueryException {
if(!i) return additive();
skipWS();
tok.reset();
while(digit(curr()))
tok.add(consume());
if(tok.isEmpty()) error(INTEXP);
return Int.get(toLong(tok.finish()));
}
  /**
   * Parses the "FTUnit" rule: "words", "sentences" or "paragraphs".
   * @return unit (never {@code null}; an error is raised otherwise)
   * @throws QueryException query exception
   */
  private FTUnit ftUnit() throws QueryException {
    if(wsConsumeWs(WORDS)) return FTUnit.WORD;
    if(wsConsumeWs(SENTENCES)) return FTUnit.SENTENCE;
    if(wsConsumeWs(PARAGRAPHS)) return FTUnit.PARAGRAPH;
    // error() raises an exception; the return statement is never reached
    error(INCOMPLETE);
    return null;
  }
  /**
   * Parses the "FTMatchOption" rule: a single "using ..." option, updating
   * the given options instance. Duplicate options raise errors.
   * @param opt options instance (modified in place)
   * @return false if no options were found
   * @throws QueryException query exception
   */
  private boolean ftMatchOption(final FTOpt opt) throws QueryException {
    if(!wsConsumeWs(USING)) return false;
    if(wsConsumeWs(LOWERCASE)) {
      if(opt.isSet(LC) || opt.isSet(UC) || opt.isSet(CS)) error(FTDUP, CASE);
      opt.set(CS, true);
      opt.set(LC, true);
    } else if(wsConsumeWs(UPPERCASE)) {
      if(opt.isSet(LC) || opt.isSet(UC) || opt.isSet(CS)) error(FTDUP, CASE);
      opt.set(CS, true);
      opt.set(UC, true);
    } else if(wsConsumeWs(CASE)) {
      if(opt.isSet(LC) || opt.isSet(UC) || opt.isSet(CS)) error(FTDUP, CASE);
      opt.set(CS, wsConsumeWs(SENSITIVE));
      if(!opt.is(CS)) wsCheck(INSENSITIVE);
    } else if(wsConsumeWs(DIACRITICS)) {
      if(opt.isSet(DC)) error(FTDUP, DIACRITICS);
      opt.set(DC, wsConsumeWs(SENSITIVE));
      if(!opt.is(DC)) wsCheck(INSENSITIVE);
    } else if(wsConsumeWs(LANGUAGE)) {
      if(opt.ln != null) error(FTDUP, LANGUAGE);
      final byte[] lan = stringLiteral();
      opt.ln = Language.get(string(lan));
      if(opt.ln == null) error(FTNOTOK, lan);
    } else if(wsConsumeWs(OPTION)) {
      optionDecl();
    } else {
      // remaining options may be negated with "no"
      final boolean using = !wsConsumeWs(NO);
      if(wsConsumeWs(STEMMING)) {
        if(opt.isSet(ST)) error(FTDUP, STEMMING);
        opt.set(ST, using);
      } else if(wsConsumeWs(THESAURUS)) {
        if(opt.th != null) error(FTDUP, THESAURUS);
        opt.th = new ThesQuery();
        if(using) {
          // one thesaurus, or a parenthesized comma-separated list
          final boolean par = wsConsume(PAR1);
          if(!wsConsumeWs(DEFAULT)) ftThesaurusID(opt.th);
          while(par && wsConsume(COMMA))
            ftThesaurusID(opt.th);
          if(par) wsCheck(PAR2);
        }
      } else if(wsConsumeWs(STOP)) {
        // add union/except
        wsCheck(WORDS);
        if(opt.sw != null) error(FTDUP, STOP + ' ' + WORDS);
        opt.sw = new StopWords();
        if(wsConsumeWs(DEFAULT)) {
          if(!using) error(FTSTOP);
        } else {
          // literal word lists and/or external files, combined with
          // "union" / "except"
          boolean union = false;
          boolean except = false;
          while(using) {
            if(wsConsume(PAR1)) {
              do {
                final byte[] sl = stringLiteral();
                if(except) opt.sw.delete(sl);
                else if(!union || !opt.sw.contains(sl)) opt.sw.add(sl);
              } while(wsConsume(COMMA));
              wsCheck(PAR2);
            } else if(wsConsumeWs(AT)) {
              final String fn = string(stringLiteral());
              // optional: resolve URI reference
              final IO fl = ctx.stop != null ? ctx.stop.get(fn) : ctx.sc.io(fn);
              if(!opt.sw.read(fl, except)) error(NOSTOPFILE, fl);
            } else if(!union && !except) {
              error(FTSTOP);
            }
            union = wsConsumeWs(UNION);
            except = !union && wsConsumeWs(EXCEPT);
            if(!union && !except) break;
          }
        }
      } else if(wsConsumeWs(WILDCARDS)) {
        if(opt.isSet(WC)) error(FTDUP, WILDCARDS);
        // wildcards and fuzzy matching are mutually exclusive
        if(opt.is(FZ)) error(FTFZWC);
        opt.set(WC, using);
      } else if(wsConsumeWs(FUZZY)) {
        if(opt.isSet(FZ)) error(FTDUP, FUZZY);
        if(opt.is(WC)) error(FTFZWC);
        opt.set(FZ, using);
      } else {
        // error() raises an exception; the return is never reached
        error(FTMATCH, consume());
        return false;
      }
    }
    return true;
  }
  /**
   * Parses the "FTThesaurusID" rule: "at uri", optionally followed by a
   * relationship string and a level range, and registers the thesaurus.
   * @param thes link to thesaurus (modified in place)
   * @throws QueryException query exception
   */
  private void ftThesaurusID(final ThesQuery thes) throws QueryException {
    wsCheck(AT);
    final String fn = string(stringLiteral());
    // optional: resolve URI reference
    final IO fl = ctx.thes != null ? ctx.thes.get(fn) : ctx.sc.io(fn);
    final byte[] rel = wsConsumeWs(RELATIONSHIP) ? stringLiteral() : EMPTY;
    final Expr[] range = ftRange(true);
    long min = 0;
    long max = Long.MAX_VALUE;
    if(range != null) {
      wsCheck(LEVELS);
      // values will always be integer instances
      min = ((Int) range[0]).itr(info());
      max = ((Int) range[1]).itr(info());
    }
    thes.add(new Thesaurus(fl, rel, min, max, ctx.context));
  }
/**
 * Parses the "InsertExpr" rule:
 * "insert node(s)" <source> ("as" ("first"|"last"))? ("into"|"after"|"before") <target>.
 * Restores the input position and returns {@code null} if the leading
 * keywords do not match.
 * @return insert expression, or {@code null} if none was found
 * @throws QueryException query exception
 */
private Expr insert() throws QueryException {
final int i = ip;
if(!wsConsumeWs(INSERT) || !wsConsumeWs(NODE) && !wsConsumeWs(NODES)) {
ip = i;
return null;
}
final Expr s = check(single(), INCOMPLETE);
boolean first = false;
boolean last = false;
boolean before = false;
boolean after = false;
if(wsConsumeWs(AS)) {
// "as first into" / "as last into"
first = wsConsumeWs(FIRST);
if(!first) {
wsCheck(LAST);
last = true;
}
wsCheck(INTO);
} else if(!wsConsumeWs(INTO)) {
// otherwise "into", "after" or "before" is required
after = wsConsumeWs(AFTER);
before = !after && wsConsumeWs(BEFORE);
if(!after && !before) error(INCOMPLETE);
}
final Expr trg = check(single(), INCOMPLETE);
// mark the whole query as updating
ctx.updating(true);
return new Insert(info(), s, first, last, before, after, trg);
}
/**
 * Parses the "DeleteExpr" rule: "delete node(s)" <target>.
 * Restores the input position and returns {@code null} when the
 * leading keywords are not present.
 * @return delete expression, or {@code null} if none was found
 * @throws QueryException query exception
 */
private Expr deletee() throws QueryException {
  final int pos = ip;
  if(wsConsumeWs(DELETE) && (wsConsumeWs(NODES) || wsConsumeWs(NODE))) {
    // mark the whole query as updating
    ctx.updating(true);
    return new Delete(info(), check(single(), INCOMPLETE));
  }
  ip = pos;
  return null;
}
/**
 * Parses the "RenameExpr" rule: "rename node" <target> "as" <name>.
 * Restores the input position and returns {@code null} when the
 * leading keywords are not present.
 * @return rename expression, or {@code null} if none was found
 * @throws QueryException query exception
 */
private Expr rename() throws QueryException {
  final int pos = ip;
  if(!(wsConsumeWs(RENAME) && wsConsumeWs(NODE))) {
    ip = pos;
    return null;
  }
  final Expr target = check(single(), INCOMPLETE);
  wsCheck(AS);
  final Expr newName = check(single(), INCOMPLETE);
  // mark the whole query as updating
  ctx.updating(true);
  return new Rename(info(), target, newName);
}
/**
 * Parses the "ReplaceExpr" rule:
 * "replace" ("value" "of")? "node" <target> "with" <source>.
 * Restores the input position and returns {@code null} when the keywords
 * after "replace" do not match.
 * @return replace expression, or {@code null} if none was found
 * @throws QueryException query exception
 */
private Expr replace() throws QueryException {
final int i = ip;
if(!wsConsumeWs(REPLACE)) return null;
// v: "replace value of node" variant (replaces the node's value only)
final boolean v = wsConsumeWs(VALUEE);
if(v) {
wsCheck(OF);
wsCheck(NODE);
} else if(!wsConsumeWs(NODE)) {
ip = i;
return null;
}
final Expr t = check(single(), INCOMPLETE);
wsCheck(WITH);
final Expr r = check(single(), INCOMPLETE);
// mark the whole query as updating
ctx.updating(true);
return new Replace(info(), t, r, v);
}
/**
 * Parses the "TransformExpr" rule:
 * "copy" $v ":=" <expr> ("," $v ":=" <expr>)* "modify" <expr> "return" <expr>.
 * The updating flag and the variable scope are saved and restored around the
 * expression, as a transform is not itself an updating expression.
 * @return transform expression, or {@code null} if none was found
 * @throws QueryException query exception
 */
private Expr transform() throws QueryException {
if(!wsConsumeWs(COPY, DOLLAR, INCOMPLETE)) return null;
// remember and reset the updating state; the copied nodes may be modified
final boolean u = ctx.updating();
ctx.updating(false);
// remember the variable stack size to drop the copy variables afterwards
final int s = ctx.vars.size();
Let[] fl = { };
do {
final Var v = Var.create(ctx, info(), varName(), null);
wsCheck(ASSIGN);
final Expr e = check(single(), INCOMPLETE);
ctx.vars.add(v);
fl = Array.add(fl, new Let(info(), e, v));
} while(wsConsumeWs(COMMA));
wsCheck(MODIFY);
final Expr m = check(single(), INCOMPLETE);
wsCheck(RETURN);
final Expr r = check(single(), INCOMPLETE);
// restore variable scope and updating flag
ctx.vars.size(s);
ctx.updating(u);
return new Transform(info(), fl, m, r);
}
/**
 * Parses the "NCName" rule.
 * @param err error to raise if no NCName is found, or {@code null} to
 *   return an empty token instead
 * @return parsed name, or an empty token
 * @throws QueryException query exception
 */
private byte[] ncName(final Err err) throws QueryException {
  tok.reset();
  if(!ncName()) {
    if(err != null) error(err, tok);
    return EMPTY;
  }
  return tok.finish();
}
/**
 * Parses the "EQName" rule.
 * @param err optional error message. Will be thrown if no EQName is found,
 *   or ignored if set to {@code null}
 * @param def default namespace, or operation mode
 *   ({@link #URICHECK}, {@link #SKIPCHECK})
 * @return parsed name, or {@code null} if none was found
 * @throws QueryException query exception
 */
private QNm eQName(final Err err, final byte[] def) throws QueryException {
final int i = ip;
// XQuery 3.0 extended syntax: Q{uri}local
if(ctx.xquery3 && consume(EQNAME)) {
final byte[] uri = bracedURILiteral();
final byte[] name = ncName(null);
if(name.length != 0) {
if(def == URICHECK && uri.length == 0) error(NOURI, name);
return new QNm(name, uri);
}
// no local name found: backtrack and retry as plain QName
ip = i;
}
final byte[] nm = qName(err);
if(nm.length == 0) return null;
// SKIPCHECK: leave the namespace unresolved
if(def == SKIPCHECK) return new QNm(nm);
// create new EQName and set namespace
final QNm name = new QNm(nm, ctx);
if(!name.hasURI()) {
if(def == URICHECK) error(NSMISS, name);
// a prefixed name must resolve to a URI
if(name.hasPrefix()) error(NOURI, name);
// otherwise fall back to the supplied default namespace
name.uri(def);
}
return name;
}
/**
 * Parses the "QName" rule (NCName, optionally followed by ':' NCName).
 * @param err optional error message. Will be thrown if no QName is found, and
 *   ignored if set to {@code null}
 * @return parsed name (may be empty if {@code err} is {@code null})
 * @throws QueryException query exception
 */
private byte[] qName(final Err err) throws QueryException {
tok.reset();
if(!ncName()) {
if(err != null) error(err, consume());
} else if(consume(':')) {
if(!XMLToken.isNCStartChar(curr())) {
// ':' is not followed by a name: put it back (e.g. axis step "a::b")
--ip;
} else {
// append ':' and the local part of the name
tok.add(':');
do {
tok.add(consume());
} while(XMLToken.isNCChar(curr()));
}
}
return tok.finish();
}
/**
 * Helper method for parsing NCNames; appends the consumed
 * characters to the token builder.
 * @return true if an NCName was consumed
 */
private boolean ncName() {
  if(!XMLToken.isNCStartChar(curr())) return false;
  // first character is valid; consume while name characters follow
  tok.add(consume());
  while(XMLToken.isNCChar(curr())) tok.add(consume());
  return true;
}
/**
* Parses and converts entities.
* @param tb token builder
* @return true if an entity was found
* @throws QueryException query exception
*/
private boolean entity(final TokenBuilder tb) throws QueryException {
final int i = ip;
final boolean ent = consume('&');
if(ent) {
if(consume('
final int b = consume('x') ? 16 : 10;
int n = 0;
do {
final char c = curr();
final boolean m = digit(c);
final boolean h = b == 16
&& (c >= 'a' && c <= 'f' || c >= 'A' && c <= 'F');
if(!m && !h) entityError(i, INVENTITY);
final long nn = n;
n = n * b + (consume() & 15);
if(n < nn) entityError(i, INVCHARREF);
if(!m) n += 9;
} while(!consume(';'));
if(!XMLToken.valid(n)) entityError(i, INVCHARREF);
tb.add(n);
} else {
if(consume("lt")) {
tb.add('<');
} else if(consume("gt")) {
tb.add('>');
} else if(consume("amp")) {
tb.add('&');
} else if(consume("quot")) {
tb.add('"');
} else if(consume("apos")) {
tb.add('\'');
} else {
entityError(i, INVENTITY);
}
if(!consume(';')) entityError(i, INVENTITY);
}
} else {
final char c = consume();
int ch = c;
if(Character.isHighSurrogate(c) && curr() != 0
&& Character.isLowSurrogate(curr())) {
ch = Character.toCodePoint(c, consume());
}
if(ch == '\r') {
ch = '\n';
if(curr(ch)) consume();
}
tb.add(ch);
}
return ent;
}
/**
 * Raises an entity error, quoting up to 20 characters of the offending
 * input (truncated after the first ';' if one occurs in that window).
 * @param p start position of the entity in the input
 * @param c error code
 * @throws QueryException query exception
 */
private void entityError(final int p, final Err c) throws QueryException {
  final String tail = input.substring(p, Math.min(p + 20, il));
  final int semi = tail.indexOf(';');
  error(c, semi == -1 ? tail : tail.substring(0, semi + 1));
}
/**
 * Raises an error if the specified expression is empty.
 * @param <E> expression type
 * @param expr expression to check (may be {@code null})
 * @param err error message
 * @return the (non-null) expression
 * @throws QueryException query exception
 */
private <E extends Expr> E check(final E expr, final Err err) throws QueryException {
if(expr == null) error(err);
return expr;
}
/**
 * Raises an error if the specified character cannot be consumed.
 * @param ch character to be found
 * @throws QueryException query exception
 */
private void check(final int ch) throws QueryException {
  final boolean consumed = consume(ch);
  if(!consumed) error(WRONGCHAR, (char) ch, found());
}
/**
 * Skips whitespaces and raises an error if the specified string
 * cannot be consumed afterwards.
 * @param s string to be found
 * @throws QueryException query exception
 */
private void wsCheck(final String s) throws QueryException {
  final boolean consumed = wsConsume(s);
  if(!consumed) error(WRONGCHAR, s, found());
}
/**
 * Checks if a referenced variable is defined and throws the specified error
 * if not. As a fallback, variables from function modules are initialized
 * once, lazily, on the first unresolved lookup.
 * @param name variable name
 * @param err error to throw
 * @return referenced variable
 * @throws QueryException if the variable isn't defined
 */
private Var checkVar(final QNm name, final Err err) throws QueryException {
Var v = ctx.vars.get(name);
// dynamically assign variables from function modules
if(v == null && !declVars) {
declVars = true;
Variable.init(ctx);
v = ctx.vars.get(name);
}
if(v == null) error(err, '$' + string(name.string()));
return v;
}
/**
 * Checks that the current character differs from the specified one.
 * An error is raised if the input is exhausted.
 * @param ch character to compare with
 * @return {@code true} if the current character differs
 * @throws QueryException query exception
 */
private boolean not(final char ch) throws QueryException {
  final char cur = curr();
  // a zero character signals exhausted input
  if(cur == 0) error(WRONGCHAR, ch, found());
  return cur != ch;
}
/**
 * Consumes the specified token and surrounding whitespaces.
 * A keyword is only accepted as a complete token: if it is directly
 * followed by another name character, the position is restored.
 * @param t token to consume
 * @return true if token was found
 * @throws QueryException query exception
 */
private boolean wsConsumeWs(final String t) throws QueryException {
final int i = ip;
if(!wsConsume(t)) return false;
// accept if whitespace follows, or the token cannot be part of a name
if(skipWS() || !XMLToken.isNCStartChar(t.charAt(0))
|| !XMLToken.isNCChar(curr())) return true;
// the token is only a prefix of a longer name: backtrack
ip = i;
return false;
}
/**
 * Consumes the specified two strings or jumps back to the old query position.
 * If the strings are found, the cursor is placed after the first token.
 * @param s1 string to be consumed
 * @param s2 second string (only looked ahead, not consumed)
 * @param expr alternative error message, stored for later reporting
 * @return result of check
 * @throws QueryException query exception
 */
private boolean wsConsumeWs(final String s1, final String s2, final Err expr)
throws QueryException {
final int i = ip;
if(!wsConsumeWs(s1)) return false;
// remember the alternative error and its position for error()
alter = expr;
ap = ip;
final int i2 = ip;
final boolean ok = wsConsume(s2);
// on success, rewind to just after s1; on failure, rewind before s1
ip = ok ? i2 : i;
return ok;
}
/**
 * Skips whitespaces, then consumes the specified string; trailing
 * characters are ignored.
 * @param str string to consume
 * @return true if the string was found
 * @throws QueryException query exception
 */
private boolean wsConsume(final String str) throws QueryException {
  skipWS();
  final boolean found = consume(str);
  return found;
}
/**
 * Consumes all whitespace characters and comments from the remaining query.
 * @return true if whitespaces or comments were skipped
 * @throws QueryException query exception
 */
private boolean skipWS() throws QueryException {
final int i = ip;
while(more()) {
final int c = curr();
// "(:" starts a comment, which also counts as whitespace here
if(c == '(' && next() == ':') {
comment();
} else {
// stop at the first non-whitespace character
if(c <= 0 || c > ' ') return i != ip;
++ip;
}
}
return i != ip;
}
/**
 * Consumes a (possibly nested) comment. Expects the cursor to be on the
 * opening '(' of "(:"; on return, it is placed behind the closing ":)".
 * @throws QueryException query exception if the comment is not closed
 */
private void comment() throws QueryException {
// step over '('; the loop header below steps over ':' on first entry
++ip;
while(++ip < il) {
// recurse into nested "(: ... :)" comments
if(curr('(') && next() == ':') comment();
if(curr(':') && next() == ')') {
ip += 2;
return;
}
}
// input exhausted without ":)"
error(COMCLOSE);
}
/**
 * Consumes all following whitespace characters (without comment handling).
 * @return true if whitespaces were found, or if the input was already
 *   exhausted
 */
private boolean consumeWS() {
  final int start = ip;
  while(more()) {
    final int ch = curr();
    final boolean ws = ch > 0 && ch <= ' ';
    if(!ws) return ip != start;
    ++ip;
  }
  return true;
}
/**
 * Throws the alternative error message, rewinding the cursor to the
 * position stored alongside it. Unknown-function errors are delegated
 * to the function registry for a more specific message first.
 * @throws QueryException query exception
 */
private void error() throws QueryException {
ip = ap;
if(alter != FUNCUNKNOWN) throw error(alter);
ctx.funcs.funError(alterFunc, info());
throw error(alter, alterFunc.string());
}
/**
 * Adds an expression to the specified list, raising an error if the
 * expression is missing.
 * @param ar target expression list
 * @param e new expression (must not be {@code null})
 * @throws QueryException query exception
 */
private void add(final ExprList ar, final Expr e) throws QueryException {
if(e == null) error(INCOMPLETE);
ar.add(e);
}
/**
 * Throws the specified error.
 * @param err error to be thrown
 * @param arg error arguments
 * @return never returns; declared with a return type so callers can
 *   write {@code throw error(...)}
 * @throws QueryException query exception
 */
public QueryException error(final Err err, final Object... arg) throws QueryException {
throw err.thrw(info(), arg);
}
/**
* Finalizes the QNames by assigning namespace URIs.
* @param npos first entry to be checked
* @throws QueryException query exception
*/
private void assignURI(final int npos) throws QueryException {
for(int i = npos; i < names.size(); i++) {
if(names.get(i).assign(npos == 0)) names.remove(i
}
}
/** Type wrapper: pairs a type with an optional kind test. */
private static class TypeWrapper {
/** Wrapped type. */
final Type type;
/** Kind test; {@code null} if none applies. */
final Test test;
/**
 * Constructor without kind test.
 * @param t type
 */
TypeWrapper(final Type t) {
this(t, null);
}
/**
 * Constructor.
 * @param t type
 * @param k kind test (may be {@code null})
 */
TypeWrapper(final Type t, final Test k) {
type = t;
test = k;
}
}
/** Cache for checking QNames after their construction. */
private class QNmCheck {
/** QName to be checked. */
final QNm name;
/** Flag for assigning the default element namespace to unprefixed names. */
final boolean nsElem;
/**
 * Constructor; the default element namespace will be applied.
 * @param nm qname
 */
QNmCheck(final QNm nm) {
this(nm, true);
}
/**
 * Constructor.
 * @param nm qname
 * @param nse apply default element namespace to unprefixed names
 */
QNmCheck(final QNm nm, final boolean nse) {
name = nm;
nsElem = nse;
}
/**
 * Assigns the namespace URI that is currently in scope.
 * @param check if true, raise an error when a prefix resolves to no URI
 * @return true if the name has a URI after assignment
 * @throws QueryException query exception
 */
boolean assign(final boolean check) throws QueryException {
if(name.hasURI()) return true;
if(name.hasPrefix()) {
// resolve the prefix against the statically known namespaces
name.uri(ctx.sc.ns.uri(name.prefix()));
if(check && !name.hasURI()) error(NOURI, name);
} else if(nsElem) {
name.uri(ctx.sc.nsElem);
}
return name.hasURI();
}
}
}
|
package org.broad.igv.ui;
import org.apache.log4j.Logger;
import org.broad.igv.DirectoryManager;
import org.broad.igv.Globals;
import org.broad.igv.annotations.ForTesting;
import org.broad.igv.charts.ScatterPlotUtils;
import org.broad.igv.feature.genome.GenomeManager;
import org.broad.igv.ga4gh.Ga4ghAPIHelper;
import org.broad.igv.ga4gh.GoogleUtils;
import org.broad.igv.ga4gh.OAuthUtils;
import org.broad.igv.gs.GSOpenSessionMenuAction;
import org.broad.igv.gs.GSSaveSessionMenuAction;
import org.broad.igv.gs.GSUtils;
import org.broad.igv.lists.GeneListManagerUI;
import org.broad.igv.prefs.PreferencesManager;
import org.broad.igv.tools.IgvToolsGui;
import org.broad.igv.tools.motiffinder.MotifFinderPlugin;
import org.broad.igv.track.CombinedDataSourceDialog;
import org.broad.igv.ui.action.*;
import org.broad.igv.event.GenomeChangeEvent;
import org.broad.igv.event.IGVEventBus;
import org.broad.igv.event.IGVEventObserver;
import org.broad.igv.feature.genome.RemoveGenomesDialog;
import org.broad.igv.ui.commandbar.GenomeComboBox;
import org.broad.igv.ui.legend.LegendDialog;
import org.broad.igv.ui.panel.FrameManager;
import org.broad.igv.ui.panel.MainPanel;
import org.broad.igv.ui.panel.ReferenceFrame;
import org.broad.igv.ui.panel.ReorderPanelsDialog;
import org.broad.igv.ui.util.*;
import org.broad.igv.util.BrowserLauncher;
import org.broad.igv.util.HttpUtils;
import org.broad.igv.util.LongRunningTask;
import org.broad.igv.util.ResourceLocator;
import org.broad.igv.util.blat.BlatClient;
import org.broad.igv.util.encode.EncodeFileBrowser;
import javax.swing.*;
import javax.swing.event.MenuEvent;
import javax.swing.event.MenuListener;
import javax.swing.plaf.basic.BasicBorders;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import static org.broad.igv.prefs.Constants.*;
import static org.broad.igv.ui.UIConstants.*;
/**
* Main menu bar at top of window. File / genomes / view / etc.
* Singleton
*
* @author jrobinso
* @date Apr 4, 2011
*/
public class IGVMenuBar extends JMenuBar implements IGVEventObserver {
private static Logger log = Logger.getLogger(IGVMenuBar.class);
public static final String GENOMESPACE_REG_TOOLTIP = "Register for GenomeSpace";
public static final String GENOMESPACE_REG_PAGE = "http:
private JMenu extrasMenu;
private FilterTracksMenuAction filterTracksAction;
private JMenu viewMenu;
IGV igv;
private JMenu toolsMenu;
/**
* We store this as a field because we alter it if
* we can't access genome server list
*/
private JMenuItem loadFromServerMenuItem;
private static final String LOAD_GENOME_SERVER_TOOLTIP = "Select genomes available on the server to appear in menu.";
private static final String CANNOT_LOAD_GENOME_SERVER_TOOLTIP = "Could not reach genome server";
private static IGVMenuBar instance;
private JMenu googleMenu;
private JMenuItem encodeMenuItem;
/**
 * Enables or disables the "Load Genome From Server" menu item and updates
 * its tooltip according to genome-server reachability. The UI mutation is
 * dispatched to the Swing event thread. No-op if the item was never built.
 *
 * @param reachable true if the genome server could be reached
 */
public void notifyGenomeServerReachable(boolean reachable) {
    if (loadFromServerMenuItem == null) {
        return;
    }
    UIUtilities.invokeOnEventThread(() -> {
        loadFromServerMenuItem.setEnabled(reachable);
        loadFromServerMenuItem.setToolTipText(
                reachable ? LOAD_GENOME_SERVER_TOOLTIP : CANNOT_LOAD_GENOME_SERVER_TOOLTIP);
    });
}
/** Opens the modal "About" dialog over the main IGV frame. */
public void showAboutDialog() {
    AboutDialog dialog = new AboutDialog(IGV.getMainFrame(), true);
    dialog.setVisible(true);
}
/**
 * Creates the singleton menu bar on the Swing event thread. Calling this a
 * second time is only legal with the same {@code IGV} instance, in which
 * case the existing menu bar is returned.
 *
 * @param igv the owning IGV application object
 * @return the singleton menu bar
 * @throws IllegalStateException if an instance already exists for a different IGV
 */
static IGVMenuBar createInstance(IGV igv) {
    if (instance != null) {
        if (igv != instance.igv) {
            throw new IllegalStateException("Cannot create another IGVMenuBar, use getInstance");
        }
        return instance;
    }
    UIUtilities.invokeAndWaitOnEventThread(() -> instance = new IGVMenuBar(igv));
    return instance;
}
/**
 * Returns the singleton menu bar, or {@code null} if
 * {@link #createInstance} has not been called yet.
 */
public static IGVMenuBar getInstance() {
return instance;
}
/**
 * Private constructor; use {@link #createInstance(IGV)}.
 * Builds all menus, subscribes to genome-change events, and wires
 * Mac-specific about/quit handlers.
 * @param igv the owning IGV application object
 */
private IGVMenuBar(IGV igv) {
this.igv = igv;
setBorder(new BasicBorders.MenuBarBorder(Color.GRAY, Color.GRAY));
setBorderPainted(true);
for (AbstractButton menu : createMenus()) {
add(menu);
}
IGVEventBus.getInstance().subscribe(GenomeChangeEvent.class, this);
//This is for Macs, so showing the about dialog
//from the command bar does what we want.
if (Globals.IS_MAC) {
DesktopIntegration.setAboutHandler(this);
DesktopIntegration.setQuitHandler();
}
}
/**
 * Builds the top-level menus in display order. The tools menu is produced
 * as a side effect of {@link #refreshToolsMenu()} and then appended; the
 * Google menu's visibility is driven by the {@code ENABLE_GOOGLE_MENU}
 * preference.
 *
 * @return the menus to install on this menu bar
 */
private List<AbstractButton> createMenus() {
    List<AbstractButton> menus = new ArrayList<AbstractButton>();
    menus.add(createFileMenu());
    menus.add(createGenomesMenu());
    menus.add(createViewMenu());
    menus.add(createTracksMenu());
    menus.add(createRegionsMenu());
    refreshToolsMenu();
    menus.add(toolsMenu);
    menus.add(createGenomeSpaceMenu());
    extrasMenu = createExtrasMenu();
    //extrasMenu.setVisible(false);
    menus.add(extrasMenu);
    try {
        googleMenu = createGoogleMenu();
        googleMenu.setVisible(PreferencesManager.getPreferences().getAsBoolean(ENABLE_GOOGLE_MENU));
        menus.add(googleMenu);
    } catch (IOException e) {
        // pass the throwable to the logger so the stack trace is preserved
        // (previously only e.getMessage() was logged)
        log.error("Error creating google menu: " + e.getMessage(), e);
    }
    menus.add(createHelpMenu());
    // Experimental -- remove for production release
    return menus;
}
/**
 * Generate the "tools" menu.
 * This is imperative, it is written to field {@code toolsMenu}.
 * Reason being, when we add (TODO remove)
 * a new tool, we need to refresh just this menu
 */
void refreshToolsMenu() {
List<JComponent> menuItems = new ArrayList<JComponent>(10);
// batch script
MenuAction menuAction = new RunScriptMenuAction("Run Batch Script...", KeyEvent.VK_X, igv);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// igvtools
menuAction = new SortTracksMenuAction("Run igvtools...", KeyEvent.VK_T, igv) {
@Override
public void actionPerformed(ActionEvent e) {
IgvToolsGui.launch(false, igv.getGenomeManager().getGenomeId());
}
};
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// Motif finder
menuItems.add(MotifFinderPlugin.getMenuItem());
// BLAT
menuItems.add(BlatClient.getMenuItem());
// Combine data tracks
JMenuItem combineDataItem = new JMenuItem("Combine Data Tracks");
combineDataItem.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent e) {
CombinedDataSourceDialog dialog = new CombinedDataSourceDialog(IGV.getMainFrame());
dialog.setVisible(true);
}
});
menuItems.add(combineDataItem);
MenuAction toolsMenuAction = new MenuAction("Tools", null);
// first call creates the menu; later calls replace its items in place
if (toolsMenu == null) {
toolsMenu = MenuAndToolbarUtils.createMenu(menuItems, toolsMenuAction);
toolsMenu.setName("Tools");
} else {
toolsMenu.removeAll();
for (JComponent item : menuItems) {
toolsMenu.add(item);
}
}
}
/** Makes the (normally hidden) "Extras" menu visible. */
public void enableExtrasMenu() {
extrasMenu.setVisible(true);
}
/**
 * Builds the "File" menu: track loading (file/URL/server/database/ENCODE),
 * session management (new/open/save + recent sessions), image export,
 * and application exit.
 * @return the populated file menu
 */
JMenu createFileMenu() {
List<JComponent> menuItems = new ArrayList<JComponent>();
MenuAction menuAction = null;
menuItems.add(new JSeparator());
// Load menu items
menuAction = new LoadFilesMenuAction("Load from File...", KeyEvent.VK_L, igv);
menuAction.setToolTipText(UIConstants.LOAD_TRACKS_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
menuAction = new LoadFromURLMenuAction(LoadFromURLMenuAction.LOAD_FROM_URL, KeyEvent.VK_U, igv);
menuAction.setToolTipText(UIConstants.LOAD_TRACKS_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
menuAction = new LoadFromServerAction("Load from Server...", KeyEvent.VK_S, igv);
menuAction.setToolTipText(UIConstants.LOAD_SERVER_DATA_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// menuAction = new LoadFromURLMenuAction(LoadFromURLMenuAction.LOAD_FROM_DAS, KeyEvent.VK_D, igv);
// menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// database loading is only offered when enabled in preferences
if (PreferencesManager.getPreferences().getAsBoolean(DB_ENABLED)) {
menuAction = new LoadFromDatabaseAction("Load from Database...", 0, igv);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
}
// ENCODE browser item; only visible for genomes it supports
encodeMenuItem = MenuAndToolbarUtils.createMenuItem(new BrowseEncodeAction("Load from ENCODE (2012)...", KeyEvent.VK_E, igv));
menuItems.add(encodeMenuItem);
String genomeId = IGV.getInstance().getGenomeManager().getGenomeId();
encodeMenuItem.setVisible (EncodeFileBrowser.genomeSupported(genomeId));
menuItems.add(new JSeparator());
// Session menu items
menuAction = new NewSessionMenuAction("New Session...", KeyEvent.VK_N, igv);
menuAction.setToolTipText(UIConstants.NEW_SESSION_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
menuAction = new OpenSessionMenuAction("Open Session...", KeyEvent.VK_O, igv);
menuAction.setToolTipText(UIConstants.RESTORE_SESSION_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
menuAction = new SaveSessionMenuAction("Save Session...", KeyEvent.VK_V, igv);
menuAction.setToolTipText(UIConstants.SAVE_SESSION_TOOLTIP);
JMenuItem saveSessionItem = MenuAndToolbarUtils.createMenuItem(menuAction);
menuItems.add(saveSessionItem);
menuItems.add(new JSeparator());
// Snapshot Application
menuAction =
new MenuAction("Save Image ...", null, KeyEvent.VK_A) {
@Override
public void actionPerformed(ActionEvent e) {
igv.saveImage(igv.getMainPanel());
}
};
menuAction.setToolTipText(SAVE_IMAGE_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// TODO -- change "Exit" to "Close" for BioClipse
menuItems.add(new JSeparator());      // Exit
menuAction =
new MenuAction("Exit", null, KeyEvent.VK_X) {
@Override
public void actionPerformed(ActionEvent e) {
doExitApplication();
}
};
menuAction.setToolTipText(EXIT_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// Empty the recent sessions list before we start to do
// anything with it
igv.getRecentSessionList().clear();
// Retrieve the stored session paths
String recentSessions = PreferencesManager.getPreferences().getRecentSessions();
if (recentSessions != null) {
String[] sessions = recentSessions.split(";");
for (String sessionPath : sessions) {
if (!igv.getRecentSessionList().contains(sessionPath)) {
igv.getRecentSessionList().add(sessionPath);
}
}
}
if (!IGV.getInstance().getRecentSessionList().isEmpty()) {
menuItems.add(new JSeparator());
// Now add menu items
for (final String session : IGV.getInstance().getRecentSessionList()) {
OpenSessionMenuAction osMenuAction = new OpenSessionMenuAction(session, session, IGV.getInstance());
menuItems.add(MenuAndToolbarUtils.createMenuItem(osMenuAction));
}
}
MenuAction fileMenuAction = new MenuAction("File", null, KeyEvent.VK_F);
JMenu fileMenu = MenuAndToolbarUtils.createMenu(menuItems, fileMenuAction);
return fileMenu;
}
/**
 * Builds the "Genomes" menu: loading genomes from file/URL/server,
 * removing genomes from the dropdown, and creating {@code .genome} files.
 * Stores the server-load item in {@code loadFromServerMenuItem} so it can
 * later be toggled by {@link #notifyGenomeServerReachable(boolean)}.
 * @return the populated genomes menu
 */
private JMenu createGenomesMenu() {
List<JComponent> menuItems = new ArrayList<JComponent>();
MenuAction menuAction = null;
// Load genome
menuAction =
new MenuAction("Load Genome from File...", null, KeyEvent.VK_I) {
@Override
public void actionPerformed(ActionEvent event) {
try {
File importDirectory = PreferencesManager.getPreferences().getLastGenomeImportDirectory();
if (importDirectory == null) {
PreferencesManager.getPreferences().setLastGenomeImportDirectory(DirectoryManager.getUserDirectory());
}
// Display the dialog
File file = FileDialogUtils.chooseFile("Load Genome", importDirectory, FileDialog.LOAD);
// If a file selection was made
if (file != null) {
GenomeManager.getInstance().loadGenome(file.getAbsolutePath(), null);
}
} catch (Exception e) {
MessageUtils.showErrorMessage(e.getMessage(), e);
}
}
};
menuAction.setToolTipText("Load a FASTA or .genome file...");
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// Load genome from URL
menuAction = new LoadFromURLMenuAction(LoadFromURLMenuAction.LOAD_GENOME_FROM_URL, 0, igv);
menuAction.setToolTipText("Load a FASTA or .genome file...");
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// Add genome to combo box from server
menuAction = new MenuAction("Load Genome From Server...", null) {
@Override
public void actionPerformed(ActionEvent event) {
GenomeComboBox.loadGenomeFromServer();
}
};
menuAction.setToolTipText(LOAD_GENOME_SERVER_TOOLTIP);
// kept as a field so reachability checks can enable/disable it later
loadFromServerMenuItem = MenuAndToolbarUtils.createMenuItem(menuAction);
menuItems.add(loadFromServerMenuItem);
menuItems.add(new JSeparator());
// Add genome to combo box from server
menuAction = new MenuAction("Remove genomes ...", null) {
@Override
public void actionPerformed(ActionEvent event) {
RemoveGenomesDialog dialog2 = new RemoveGenomesDialog(IGV.getMainFrame());
dialog2.setVisible(true);
}
};
menuAction.setToolTipText("Remove genomes which appear in the dropdown list");
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
menuItems.add(new JSeparator());
menuAction =
new MenuAction("Create .genome File...", null, KeyEvent.VK_D) {
@Override
public void actionPerformed(ActionEvent event) {
javax.swing.ProgressMonitor monitor = new javax.swing.ProgressMonitor(IGV.getInstance().getMainPanel(),
"Creating genome", null, 0, 100);
igv.defineGenome(monitor);
}
};
menuAction.setToolTipText(UIConstants.IMPORT_GENOME_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
MenuAction genomeMenuAction = new MenuAction("Genomes", null);
return MenuAndToolbarUtils.createMenu(menuItems, genomeMenuAction);
}
/**
 * Builds the "Tracks" menu: sorting, grouping, filtering, fitting data to
 * window, and default track height. The filter action is kept in the
 * {@code filterTracksAction} field.
 * @return the populated tracks menu
 */
private JMenu createTracksMenu() {
List<JComponent> menuItems = new ArrayList<JComponent>();
MenuAction menuAction = null;
// Sort Context
menuAction = new SortTracksMenuAction("Sort Tracks...", KeyEvent.VK_S, IGV.getInstance());
menuAction.setToolTipText(SORT_TRACKS_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
menuAction = new GroupTracksMenuAction("Group Tracks... ", KeyEvent.VK_G, IGV.getInstance());
menuAction.setToolTipText(UIConstants.GROUP_TRACKS_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// Filter Tracks
filterTracksAction = new FilterTracksMenuAction("Filter Tracks...", KeyEvent.VK_F, IGV.getInstance());
filterTracksAction.setToolTipText(UIConstants.FILTER_TRACKS_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(filterTracksAction));
menuItems.add(new JSeparator());
// Reset Tracks
menuAction = new FitDataToWindowMenuAction("Fit Data to Window", KeyEvent.VK_W, IGV.getInstance());
menuAction.setToolTipText(UIConstants.FIT_DATA_TO_WINDOW_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// Set track height
menuAction = new SetTrackHeightMenuAction("Set Track Height...", KeyEvent.VK_H, IGV.getInstance());
menuAction.setToolTipText(UIConstants.SET_DEFAULT_TRACK_HEIGHT_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
MenuAction dataMenuAction = new MenuAction("Tracks", null, KeyEvent.VK_K);
//menuItems.add(exportData);
return MenuAndToolbarUtils.createMenu(menuItems, dataMenuAction);
}
/**
 * Builds the "View" menu: preferences, color legends, name/attribute/header
 * panel toggles, name-panel width, panel reordering, and navigation history.
 * The result is also stored in the {@code viewMenu} field.
 * @return the populated view menu
 */
private JMenu createViewMenu() {
    List<JComponent> menuItems = new ArrayList<JComponent>();
    MenuAction menuAction = null;
    // Preferences
    menuAction =
            new MenuAction("Preferences...", null, KeyEvent.VK_P) {
                @Override
                public void actionPerformed(ActionEvent e) {
                    UIUtilities.invokeOnEventThread(new Runnable() {
                        public void run() {
                            IGV.getInstance().doViewPreferences();
                        }
                    });
                }
            };
    menuAction.setToolTipText(PREFERENCE_TOOLTIP);
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
    menuAction =
            new MenuAction("Color Legends ...", null, KeyEvent.VK_H) {
                @Override
                public void actionPerformed(ActionEvent e) {
                    (new LegendDialog(IGV.getMainFrame())).setVisible(true);
                }
            };
    menuAction.setToolTipText(SHOW_HEATMAP_LEGEND_TOOLTIP);
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
    menuItems.add(new JSeparator());
    menuAction = new MenuAction("Show Name Panel", null, KeyEvent.VK_A) {
        @Override
        public void actionPerformed(ActionEvent e) {
            JCheckBoxMenuItem menuItem = (JCheckBoxMenuItem) e.getSource();
            if (menuItem.isSelected()) {
                IGV.getInstance().getMainPanel().expandNamePanel();
            } else {
                IGV.getInstance().getMainPanel().collapseNamePanel();
            }
            IGV.getInstance().doRefresh();
        }
    };
    boolean isShowing = IGV.getInstance().getMainPanel().isExpanded();
    JCheckBoxMenuItem menuItem = new JCheckBoxMenuItem();
    menuItem.setSelected(isShowing);
    menuItem.setAction(menuAction);
    menuItems.add(menuItem);
    JMenuItem panelWidthmenuItem = new JMenuItem();
    menuAction = new MenuAction("Set Name Panel Width...", null, KeyEvent.VK_A) {
        @Override
        public void actionPerformed(ActionEvent e) {
            MainPanel mainPanel = IGV.getInstance().getMainPanel();
            String currentValue = String.valueOf(mainPanel.getNamePanelWidth());
            String newValue = MessageUtils.showInputDialog("Enter track name panel width: ", currentValue);
            if (newValue != null) {
                try {
                    Integer w = Integer.parseInt(newValue);
                    // BUG FIX: was "w == 1000", which let values above 1000
                    // through despite the "< 1000" error message below.
                    if (w <= 0 || w >= 1000) throw new NumberFormatException();
                    PreferencesManager.getPreferences().put(NAME_PANEL_WIDTH, newValue);
                    mainPanel.setNamePanelWidth(w);
                } catch (NumberFormatException ex) {
                    MessageUtils.showErrorMessage("Error: value must be a positive integer < 1000.", ex);
                }
            }
        }
    };
    panelWidthmenuItem.setAction(menuAction);
    menuItems.add(panelWidthmenuItem);
    // Hide or Show the attribute panels
    boolean isShow = PreferencesManager.getPreferences().getAsBoolean(SHOW_ATTRIBUTE_VIEWS_KEY);
    IGV.getInstance().doShowAttributeDisplay(isShow);  // <= WEIRD doing IGV.getInstance() here!
    menuAction = new MenuAction("Show Attribute Display", null, KeyEvent.VK_A) {
        @Override
        public void actionPerformed(ActionEvent e) {
            JCheckBoxMenuItem menuItem = (JCheckBoxMenuItem) e.getSource();
            PreferencesManager.getPreferences().setShowAttributeView(menuItem.getState());
            IGV.getInstance().getMainPanel().invalidate();
            IGV.getInstance().doRefresh();
        }
    };
    menuItem = MenuAndToolbarUtils.createMenuItem(menuAction, isShow);
    menuItems.add(menuItem);
    menuAction =
            new MenuAction("Select Attributes to Show...", null, KeyEvent.VK_S) {
                @Override
                public void actionPerformed(ActionEvent e) {
                    IGV.getInstance().doSelectDisplayableAttribute();
                }
            };
    menuAction.setToolTipText(SELECT_DISPLAYABLE_ATTRIBUTES_TOOLTIP);
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
    menuAction = new MenuAction("Show Header Panel", null, KeyEvent.VK_A) {
        @Override
        public void actionPerformed(ActionEvent e) {
            JCheckBoxMenuItem menuItem = (JCheckBoxMenuItem) e.getSource();
            if (menuItem.isSelected()) {
                IGV.getInstance().getMainPanel().restoreHeader();
            } else {
                IGV.getInstance().getMainPanel().removeHeader();
            }
            IGV.getInstance().doRefresh();
        }
    };
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction, true));
    menuItems.add(new JSeparator());
    menuAction =
            new MenuAction("Reorder Panels...", null, KeyEvent.VK_S) {
                @Override
                public void actionPerformed(ActionEvent e) {
                    ReorderPanelsDialog dlg = new ReorderPanelsDialog(IGV.getMainFrame());
                    dlg.setVisible(true);
                }
            };
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
    menuItems.add(new JSeparator());
    menuItems.add(new HistoryMenu("Go to"));
    // Add to IGVPanel menu
    MenuAction dataMenuAction = new MenuAction("View", null, KeyEvent.VK_V);
    viewMenu = MenuAndToolbarUtils.createMenu(menuItems, dataMenuAction);
    return viewMenu;
}
/**
 * Builds the "Regions" menu: region navigator, gene lists, and region
 * import/export.
 * NOTE(review): this method overwrites the {@code viewMenu} field with the
 * regions menu (apparent copy-paste from createViewMenu) — verify nothing
 * relies on {@code viewMenu} pointing at the actual View menu.
 * @return the populated regions menu
 */
private JMenu createRegionsMenu() {
List<JComponent> menuItems = new ArrayList<JComponent>();
MenuAction menuAction = null;
menuAction = new NavigateRegionsMenuAction("Region Navigator ...", IGV.getInstance());
menuAction.setToolTipText(UIConstants.REGION_NAVIGATOR_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
menuAction =
new MenuAction("Gene Lists...", null, KeyEvent.VK_S) {
@Override
public void actionPerformed(ActionEvent e) {
(GeneListManagerUI.getInstance(IGV.getMainFrame())).setVisible(true);
}
};
menuAction.setToolTipText("Open gene list manager");
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
menuItems.add(new JSeparator());
// Export Regions
menuAction = new ExportRegionsMenuAction("Export Regions ...", KeyEvent.VK_E, IGV.getInstance());
menuAction.setToolTipText(UIConstants.EXPORT_REGION_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// Import Regions
menuAction = new ImportRegionsMenuAction("Import Regions ...", KeyEvent.VK_I, IGV.getInstance());
menuAction.setToolTipText(IMPORT_REGION_TOOLTIP);
menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
// Import Regions
// menuAction = new ClearRegionsMenuAction("Clear Regions ...", IGV.getInstance());
// menuAction.setToolTipText(IMPORT_REGION_TOOLTIP);
// menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
MenuAction dataMenuAction = new MenuAction("Regions", null, KeyEvent.VK_V);
viewMenu = MenuAndToolbarUtils.createMenu(menuItems, dataMenuAction);
return viewMenu;
}
/**
 * Builds the "Help" menu: user guide link, help-forum link (when the desktop
 * can open a browser), update check, and About dialog.
 *
 * @return the newly created Help menu
 */
private JMenu createHelpMenu() {

    List<JComponent> menuItems = new ArrayList<JComponent>();

    MenuAction menuAction = null;

    menuAction =
            new MenuAction("User Guide ... ") {
                @Override
                public void actionPerformed(ActionEvent e) {
                    try {
                        BrowserLauncher.openURL(SERVER_BASE_URL + "igv/UserGuide");
                    } catch (IOException ex) {
                        log.error("Error opening browser", ex);
                    }
                }
            };
    menuAction.setToolTipText(HELP_TOOLTIP);
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));

    if (Desktop.isDesktopSupported()) {
        final Desktop desktop = Desktop.getDesktop();
        // BUGFIX: this item opens the help forum in a browser, so gate it on
        // BROWSE support.  It was previously gated on Desktop.Action.MAIL,
        // a leftover from when the item sent an email.
        if (desktop.isSupported(Desktop.Action.BROWSE)) {
            menuAction =
                    new MenuAction("Help Forum...") {
                        @Override
                        public void actionPerformed(ActionEvent e) {
                            try {
                                URI uri = new URI("http://groups.google.com/forum/#!forum/igv-help");
                                Desktop.getDesktop().browse(uri);
                            } catch (Exception ex) {
                                log.error("Error opening igv-help uri", ex);
                            }
                        }
                    };
            menuAction.setToolTipText("Email support");
            menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));
        }
    }

    menuAction =
            new MenuAction("Check for Updates...") {
                @Override
                public void actionPerformed(ActionEvent e) {
                    checkVersion();
                }
            };
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));

    menuAction =
            new MenuAction("About IGV ") {
                @Override
                public void actionPerformed(ActionEvent e) {
                    (new AboutDialog(IGV.getMainFrame(), true)).setVisible(true);
                }
            };
    menuAction.setToolTipText(ABOUT_TOOLTIP);
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));

    MenuAction helpMenuAction = new MenuAction("Help");
    return MenuAndToolbarUtils.createMenu(menuItems, helpMenuAction);
}
/**
 * Checks the IGV version server for a newer release and, if one exists,
 * shows the update dialog.  Versions the user elected to skip (stored as a
 * comma-separated list under the SKIP_VERSION preference) are ignored.
 * Network timeouts are temporarily shortened so the check cannot hang the
 * UI for long, and are always restored in the finally block.
 */
private void checkVersion() {
    // Save the global timeouts so they can be restored after the check.
    int readTimeout = Globals.READ_TIMEOUT;
    int connectTimeout = Globals.CONNECT_TIMEOUT;
    try {
        Main.Version thisVersion = Main.Version.getVersion(Globals.VERSION);
        if (thisVersion != null) {
            // Use short timeouts for the version probe only.
            Globals.CONNECT_TIMEOUT = 5000;
            Globals.READ_TIMEOUT = 1000;
            final String serverVersionString = HttpUtils.getInstance().getContentsAsString(HttpUtils.createURL(Globals.getVersionURL())).trim();
            // See if user has specified to skip this update
            final String skipString = PreferencesManager.getPreferences().get(SKIP_VERSION);
            HashSet<String> skipVersion = new HashSet<String>(Arrays.asList(skipString.split(",")));
            if (skipVersion.contains(serverVersionString)) return;

            Main.Version serverVersion = Main.Version.getVersion(serverVersionString.trim());
            if (serverVersion == null) return;

            if (thisVersion.lessThan(serverVersion)) {
                log.info("A later version of IGV is available (" + serverVersionString + ")");
                final VersionUpdateDialog dlg = new VersionUpdateDialog(serverVersionString);
                dlg.setVisible(true);
                if (dlg.isSkipVersion()) {
                    // Append this version to the skip list so it is not offered again.
                    String newSkipString = skipString + "," + serverVersionString;
                    PreferencesManager.getPreferences().put(SKIP_VERSION, newSkipString);
                }
            } else {
                MessageUtils.showMessage("IGV is up to date");
            }
        } else {
            // Development builds have no parseable version; the fetch result
            // is discarded -- presumably a usage ping.  TODO confirm intent.
            if (Globals.VERSION.contains("3.0_beta") || Globals.VERSION.contains("snapshot")) {
                HttpUtils.getInstance().getContentsAsString(HttpUtils.createURL(Globals.getVersionURL())).trim();
            }
        }
    } catch (Exception e) {
        log.error("Error checking version", e);
    } finally {
        // Always restore the original global timeouts.
        Globals.CONNECT_TIMEOUT = connectTimeout;
        Globals.READ_TIMEOUT = readTimeout;
    }
}
/**
 * Builds the "GenomeSpace" menu: file/genome/session load and save actions,
 * logout, and a registration link.  The menu is hidden unless the
 * GENOME_SPACE_ENABLE preference is set.
 *
 * @return the newly created GenomeSpace menu
 */
private JMenu createGenomeSpaceMenu() {

    final JMenu gsMenu = new JMenu("GenomeSpace");

    MenuAction action = new LoadFromGSMenuAction("Load File from GenomeSpace...", KeyEvent.VK_U, igv);
    gsMenu.add(MenuAndToolbarUtils.createMenuItem(action));
    gsMenu.addSeparator();

    action = new LoadGenomeFromGSMenuAction("Load Genome from GenomeSpace...", KeyEvent.VK_Z, igv);
    gsMenu.add(MenuAndToolbarUtils.createMenuItem(action));
    gsMenu.addSeparator();

    action = new GSSaveSessionMenuAction("Save Session to GenomeSpace...", igv);
    gsMenu.add(MenuAndToolbarUtils.createMenuItem(action));

    action = new GSOpenSessionMenuAction("Load Session from GenomeSpace...", igv);
    gsMenu.add(MenuAndToolbarUtils.createMenuItem(action));

    gsMenu.add(new JSeparator());

    // Logging out invalidates the token; a restart is required to finish.
    action = new MenuAction("Logout") {
        @Override
        public void actionPerformed(ActionEvent e) {
            GSUtils.logout();
            if (MessageUtils.confirm("You must shutdown IGV to complete the GenomeSpace logout. Shutdown now?")) {
                doExitApplication();
            }
        }
    };
    gsMenu.add(MenuAndToolbarUtils.createMenuItem(action));

    gsMenu.add(new JSeparator());

    action = new MenuAction("Register... ") {
        @Override
        public void actionPerformed(ActionEvent e) {
            try {
                BrowserLauncher.openURL(GENOMESPACE_REG_PAGE);
            } catch (IOException ex) {
                log.error("Error opening browser", ex);
            }
        }
    };
    action.setToolTipText(GENOMESPACE_REG_TOOLTIP);
    gsMenu.add(MenuAndToolbarUtils.createMenuItem(action));

    // Only visible when GenomeSpace support is enabled in preferences.
    gsMenu.setVisible(PreferencesManager.getPreferences().getAsBoolean(GENOME_SPACE_ENABLE));

    return gsMenu;
}
/**
 * Builds the hidden "Extras" menu: preference reset, window-size entry,
 * screenshot, track-name export, scatter plot, and a Look-and-Feel chooser.
 * The menu is created invisible; it is enabled elsewhere for internal use.
 *
 * @return the newly created Extras menu
 */
private JMenu createExtrasMenu() {

    List<JComponent> menuItems = new ArrayList<JComponent>();

    MenuAction menuAction = null;

    // Preferences reset
    menuAction = new ResetPreferencesAction("Reset Preferences", IGV.getInstance());
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));

    menuItems.add(new JSeparator());

    // Set frame dimensions
    menuAction =
            new MenuAction("Set window dimensions", null, KeyEvent.VK_C) {
                @Override
                public void actionPerformed(ActionEvent e) {
                    String value = JOptionPane.showInputDialog("Enter dimensions, e.g. 800x400");
                    if (value != null) {
                        String[] vals = value.split("x");
                        if (vals.length == 2) {
                            // BUGFIX: guard against non-numeric input, which
                            // previously threw an uncaught NumberFormatException.
                            try {
                                int w = Integer.parseInt(vals[0].trim());
                                int h = Integer.parseInt(vals[1].trim());
                                IGV.getMainFrame().setSize(w, h);
                            } catch (NumberFormatException ex) {
                                MessageUtils.showMessage("Invalid dimensions: " + value);
                            }
                        }
                    }
                }
            };
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));

    // Save entire window
    menuAction =
            new MenuAction("Save Screenshot ...", null, KeyEvent.VK_A) {
                @Override
                public void actionPerformed(ActionEvent e) {
                    IGV.getInstance().saveImage(IGV.getInstance().getContentPane());
                }
            };
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));

    menuAction = new ExportTrackNamesMenuAction("Export track names...", IGV.getInstance());
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));

    // Scatter plot over the currently visible locus of the default frame.
    menuAction = new MenuAction("Scatter Plot ...") {
        @Override
        public void actionPerformed(ActionEvent e) {
            final ReferenceFrame defaultFrame = FrameManager.getDefaultFrame();
            String chr = defaultFrame.getChrName();
            int start = (int) defaultFrame.getOrigin();
            int end = (int) defaultFrame.getEnd();
            int zoom = defaultFrame.getZoom();
            ScatterPlotUtils.openPlot(chr, start, end, zoom);
        }
    };
    menuItems.add(MenuAndToolbarUtils.createMenuItem(menuAction));

    MenuAction extrasMenuAction = new MenuAction("Extras");
    JMenu menu = MenuAndToolbarUtils.createMenu(menuItems, extrasMenuAction);

    // Look-and-Feel submenu: one item per installed L&F.
    JMenu lfMenu = new JMenu("L&F");
    LookAndFeel lf = UIManager.getLookAndFeel();  // NOTE: currently unused except by the commented line below
    for (UIManager.LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) {
        final String lfName = info.getName();
        JMenuItem cb = new JMenuItem(lfName);
        //cb.setSelected(info.getClassName().equals(lf.getClass().getName());
        cb.addActionListener(new AbstractAction() {
            public void actionPerformed(ActionEvent actionEvent) {
                for (UIManager.LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) {
                    if (lfName.equals(info.getName())) {
                        try {
                            UIManager.setLookAndFeel(info.getClassName());
                        } catch (Exception e) {
                            e.printStackTrace();
                        }
                        break;
                    }
                }
            }
        });
        lfMenu.add(cb);
    }
    menu.add(lfMenu);

    menu.setVisible(false);
    return menu;
}
/**
 * Builds the OAuth (Google) menu with Login/Logout items and a project-ID
 * entry.  Item state is refreshed off the EDT every time the menu is opened.
 *
 * @return the newly created menu, titled after the configured auth provider
 * @throws IOException if OAuth configuration cannot be loaded
 */
private JMenu createGoogleMenu() throws IOException {

    // Dynamically name menu - dwm08
    final OAuthUtils oauth = OAuthUtils.getInstance();
    JMenu menu = new JMenu(oauth.authProvider);

    // Login item; disabled until the menu listener below determines state.
    final JMenuItem login = new JMenuItem("Login ... ");
    login.addActionListener(e -> {
        try {
            oauth.openAuthorizationPage();
        } catch (Exception ex) {
            MessageUtils.showErrorMessage("Error fetching oAuth tokens.  See log for details", ex);
            log.error("Error fetching oAuth tokens", ex);
        }
    });
    login.setEnabled(false);
    menu.add(login);

    // Logout also clears the active Google project ID.
    final JMenuItem logout = new JMenuItem("Logout ");
    logout.addActionListener(e -> {
        oauth.logout();
        GoogleUtils.setProjectID(null);
    });
    logout.setEnabled(false);
    menu.add(logout);

    final JMenuItem projectID = new JMenuItem("Enter Project ID ...");
    projectID.addActionListener(e -> GoogleUtils.enterGoogleProjectID());
    menu.add(projectID);

    // Refresh login/logout state when the menu is selected.  The check may
    // hit the network, so it runs on a background task, not the EDT.
    menu.addMenuListener(new MenuListener() {
        @Override
        public void menuSelected(MenuEvent e) {
            Runnable runnable = () -> {
                boolean loggedIn = oauth.isLoggedIn();
                if (loggedIn) {
                    // Show the account name in place of "Login ...".
                    login.setText(oauth.getCurrentUserName());
                } else {
                    login.setText("Login ...");
                }
                login.setEnabled(!loggedIn);
                logout.setEnabled(loggedIn);
            };
            LongRunningTask.submit(runnable);
        }

        @Override
        public void menuDeselected(MenuEvent e) {
        }

        @Override
        public void menuCanceled(MenuEvent e) {
        }
    });

    return menu;
}
// public void enableRemoveGenomes() {
// if (removeImportedGenomeAction != null) {
// removeImportedGenomeAction.setEnabled(true);
/** Clears any active track filter; no-op when the filter action does not exist. */
public void resetSessionActions() {
    if (filterTracksAction == null) {
        return;
    }
    filterTracksAction.resetTrackFilter();
}
/**
 * Sets the track filter's match-all flag; no-op when the filter action
 * does not exist.
 *
 * @param value true to require all filter criteria to match
 */
public void setFilterMatchAll(boolean value) {
    if (filterTracksAction == null) {
        return;
    }
    filterTracksAction.setFilterMatchAll(value);
}
/**
 * @return the track filter's match-all flag, or false when the filter
 *         action does not exist
 */
public boolean isFilterMatchAll() {
    return filterTracksAction != null && filterTracksAction.isFilterMatchAll();
}
/**
 * Sets the track filter's show-all-tracks flag; no-op when the filter
 * action does not exist.
 *
 * @param value true to bypass the filter and show every track
 */
public void setFilterShowAllTracks(boolean value) {
    if (filterTracksAction == null) {
        return;
    }
    filterTracksAction.setFilterShowAllTracks(value);
}
/**
 * @return the state of the show-all-tracks checkbox, or false when the
 *         filter action does not exist
 */
public boolean isFilterShowAllTracks() {
    return filterTracksAction != null
            && filterTracksAction.getShowAllTracksFilterCheckBox().isSelected();
}
/** @return the menu stored in the viewMenu field (set during menu creation). */
public JMenu getViewMenu() {
    return viewMenu;
}
/**
 * Shuts down the application: persists session state and window bounds,
 * disposes the main frame, then terminates the JVM.  System.exit is in the
 * finally block so the process exits even if state saving throws.
 */
final public void doExitApplication() {

    try {
        IGV.getInstance().saveStateForExit();
        Frame mainFrame = IGV.getMainFrame();
        // Persist frame bounds so the next launch restores the window geometry.
        PreferencesManager.getPreferences().setApplicationFrameBounds(mainFrame.getBounds());

        // Hide and close the application
        mainFrame.setVisible(false);
        mainFrame.dispose();

    } finally {
        System.exit(0);
    }
}
// Test hook only: resets the singleton so a fresh instance can be created.
@ForTesting
static void destroyInstance() {
    instance = null;
}
/** Shows or hides the Google/OAuth menu. */
public void enableGoogleMenu(boolean aBoolean) {
    googleMenu.setVisible(aBoolean);
}
/**
 * Event-bus callback.  On a genome change, toggles the ENCODE menu item's
 * visibility (on the EDT) according to whether the new genome is supported.
 */
@Override
public void receiveEvent(final Object event) {
    if (!(event instanceof GenomeChangeEvent)) {
        return;
    }
    final GenomeChangeEvent genomeEvent = (GenomeChangeEvent) event;
    UIUtilities.invokeOnEventThread(() -> encodeMenuItem.setVisible(
            EncodeFileBrowser.genomeSupported(genomeEvent.genome.getId())));
}
}
|
package org.jtrfp.trcl.flow;
import java.awt.Point;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import org.apache.commons.math3.geometry.euclidean.threed.Vector3D;
import org.jtrfp.trcl.Camera;
import org.jtrfp.trcl.DisplayModeHandler;
import org.jtrfp.trcl.NAVSystem;
import org.jtrfp.trcl.OverworldSystem;
import org.jtrfp.trcl.SkySystem;
import org.jtrfp.trcl.Tunnel;
import org.jtrfp.trcl.World;
import org.jtrfp.trcl.beh.CollidesWithTerrain;
import org.jtrfp.trcl.beh.CollidesWithTunnelWalls;
import org.jtrfp.trcl.beh.HeadingXAlwaysPositiveBehavior;
import org.jtrfp.trcl.beh.LoopingPositionBehavior;
import org.jtrfp.trcl.beh.MatchDirection;
import org.jtrfp.trcl.beh.MatchPosition;
import org.jtrfp.trcl.beh.SkyCubeCloudModeUpdateBehavior;
import org.jtrfp.trcl.beh.phy.MovesByVelocity;
import org.jtrfp.trcl.core.Features;
import org.jtrfp.trcl.core.Renderer;
import org.jtrfp.trcl.core.ResourceManager;
import org.jtrfp.trcl.core.TR;
import org.jtrfp.trcl.file.AbstractTriplet;
import org.jtrfp.trcl.file.DirectionVector;
import org.jtrfp.trcl.file.LVLFile;
import org.jtrfp.trcl.file.Location3D;
import org.jtrfp.trcl.file.NAVFile.NAVSubObject;
import org.jtrfp.trcl.file.NAVFile.START;
import org.jtrfp.trcl.file.TDFFile;
import org.jtrfp.trcl.flow.LoadingProgressReporter.UpdateHandler;
import org.jtrfp.trcl.flow.NAVObjective.Factory;
import org.jtrfp.trcl.obj.ObjectDirection;
import org.jtrfp.trcl.obj.Player;
import org.jtrfp.trcl.obj.PortalExit;
import org.jtrfp.trcl.obj.Projectile;
import org.jtrfp.trcl.obj.ProjectileFactory;
import org.jtrfp.trcl.obj.Propelled;
import org.jtrfp.trcl.obj.TunnelEntranceObject;
import org.jtrfp.trcl.obj.WorldObject;
import org.jtrfp.trcl.shell.GameShell;
import org.jtrfp.trcl.snd.GPUResidentMOD;
import org.jtrfp.trcl.snd.MusicPlaybackEvent;
import org.jtrfp.trcl.snd.SoundSystem;
public class Mission {
// PROPERTIES
public static final String MISSION_MODE = "missionMode";
public static final String SATELLITE_VIEW = "satelliteView";
private final TR tr;
private final List<NAVObjective>
navs = new LinkedList<NAVObjective>();
private final LVLFile lvl;
private final HashMap<String, Tunnel>
tunnels = new HashMap<String, Tunnel>();
private final HashMap<Integer, PortalExit>
tunnelPortals = new HashMap<Integer, PortalExit>();
private double[] playerStartPosition
= new double[3];
private List<NAVSubObject> navSubObjects;
private ObjectDirection playerStartDirection;
private final Game game;
private final String levelName;
private OverworldSystem overworldSystem;
private final Result[] missionEnd = new Result[]{null};
private int groundTargetsDestroyed=0,
airTargetsDestroyed=0,
foliageDestroyed=0;
private int totalNumTunnels;
private final LinkedList<Tunnel>
tunnelsRemaining = new LinkedList<Tunnel>();
private final boolean showIntro;
private volatile MusicPlaybackEvent
bgMusic;
private final Object missionLock = new Object();
private final Map<Integer,TunnelEntranceObject>
tunnelMap = new HashMap<Integer,TunnelEntranceObject>();
private boolean bossFight = false, satelliteView = false;
private MissionMode missionMode = new Mission.LoadingMode();
private final PropertyChangeSupport pcs = new PropertyChangeSupport(this);
private Tunnel currentTunnel;
private final DisplayModeHandler displayHandler;
private Object [] levelLoadingMode, gameplayMode, briefingMode, summaryMode, emptyMode= new Object[]{};
// Loading phases; ordinal order selects the sub-reporter slot used by go().
private enum LoadingStages {
    navs, tunnels, overworld
}// end LoadingStages

//ROOT STATES
// Marker-interface hierarchy describing the mission lifecycle.  Anonymous
// instances are published via tr.setRunState(...) so other systems can test
// the current phase with instanceof.
public interface MissionState extends Game.GameRunningMode{}
public interface ConstructingState extends MissionState{}
public interface ConstructedState extends MissionState{}
public interface ActiveMissionState extends ConstructedState{}
public interface LoadingState extends ActiveMissionState{}
public interface GameplayState extends ActiveMissionState{}
// Briefing screens (planet/enemy intro and end-of-mission summary).
public interface Briefing extends GameplayState{}
public interface PlanetBrief extends Briefing{}
public interface EnemyBrief extends Briefing{}
public interface MissionSummary extends Briefing{}
// States in which the player is actively flying.
public interface PlayerActivity extends GameplayState{}
public interface OverworldState extends PlayerActivity{}
public interface ChamberState extends OverworldState{}
public interface TunnelState extends PlayerActivity{}
/**
 * Wires the mission to its engine context and pre-builds the level-loading
 * display mode.  Run-state transitions (Constructing -> Constructed) bracket
 * construction; heavy level loading is deferred to {@link #go()}.
 *
 * @param tr        engine root
 * @param game      owning game (expected to be a TVF3Game; see casts below)
 * @param lvl       level definition file
 * @param levelName display name shown while loading
 * @param showIntro whether to play the briefing sequence before gameplay
 */
public Mission(TR tr, Game game, LVLFile lvl, String levelName, boolean showIntro) {
    this.tr = tr;
    this.lvl = lvl;
    this.game = game;
    this.levelName = levelName;
    this.showIntro = showIntro;
    this.displayHandler = new DisplayModeHandler(tr.getDefaultGrid());
    Features.init(this);
    tr.setRunState(new ConstructingState(){});
    // Loading screen + upfront display are shown together while the level loads.
    levelLoadingMode = new Object[]{
            ((TVF3Game)game).levelLoadingScreen,
            ((TVF3Game)game).upfrontDisplay
    };
    tr.setRunState(new ConstructedState(){});
}// end Mission
/**
 * Runs the full mission lifecycle on the calling thread: loads the level,
 * tunnels and NAV objectives, optionally plays the briefing, hands control
 * to the player, then blocks until the mission ends and shows the summary.
 * Holds missionLock for the whole run; abort() posts into missionEnd[0] and
 * is checked at several points so a mission can be cancelled mid-load.
 *
 * @return the mission Result; also returned early if an abort was posted
 */
public Result go() {
    tr.setRunState(new LoadingState(){});
    setMissionMode(new Mission.LoadingMode());
    synchronized(missionLock){
    // Abort check: an abort may already have been posted before we started.
    synchronized(missionEnd){
        if(missionEnd[0]!=null)
            return missionEnd[0];
    }
    tr.getThreadManager().setPaused(true);
    // Destroy any projectiles left over from a previous mission.
    for(ProjectileFactory pf:tr.getResourceManager().getProjectileFactories())
        for(Projectile proj:pf.getProjectiles())
            proj.destroy();
    System.out.println("Starting GampeplayLevel loading sequence...");
    // Progress is fanned out into one sub-reporter per LoadingStages value.
    final LoadingProgressReporter rootProgress = LoadingProgressReporter.Impl
            .createRoot(new UpdateHandler() {
                @Override
                public void update(double unitProgress) {
                    ((TVF3Game)game).getLevelLoadingScreen().setLoadingProgress(unitProgress);
                }
            });
    final LoadingProgressReporter[] progressStages = rootProgress
            .generateSubReporters(LoadingStages.values().length);
    // Neutral sky while loading; the level's own sky is applied later.
    final Renderer renderer = tr.mainRenderer.get();
    renderer.getCamera().probeForBehavior(SkyCubeCloudModeUpdateBehavior.class).setEnable(false);
    renderer.getSkyCube().setSkyCubeGen(GameShell.DEFAULT_GRADIENT);
    final Camera camera = renderer.getCamera();
    camera.setHeading(Vector3D.PLUS_I);
    camera.setTop(Vector3D.PLUS_J);
    ((TVF3Game)game).levelLoadingMode();
    displayHandler.setDisplayMode(levelLoadingMode);
    //((TVF3Game)game).setDisplayMode(((TVF3Game)game).levelLoadingMode);
    ((TVF3Game)game).getUpfrontDisplay().submitPersistentMessage(levelName);
    try {
        final ResourceManager rm = tr.getResourceManager();
        final Player player = ((TVF3Game)tr.getGame()).getPlayer();
        final TDFFile tdf = rm.getTDFData(lvl.getTunnelDefinitionFile());
        player.setActive(false);
        // Abort check
        synchronized(missionEnd){
            if(missionEnd[0]!=null)
                return missionEnd[0];
        }
        // Build the overworld, then the display-mode object sets that include it.
        overworldSystem = new OverworldSystem(tr,
                progressStages[LoadingStages.overworld.ordinal()]);
        briefingMode = new Object[]{
                ((TVF3Game)game).briefingScreen,
                overworldSystem
        };
        gameplayMode = new Object[]{
                ((TVF3Game)game).navSystem,
                ((TVF3Game)game).hudSystem,
                ((TVF3Game)game).upfrontDisplay,
                overworldSystem,
                rm.getDebrisSystem(),
                rm.getPowerupSystem(),
                rm.getProjectileFactories(),
                rm.getExplosionFactory(),
                rm.getSmokeSystem()
        };
        summaryMode = new Object[]{
                ((TVF3Game)game).getBriefingScreen(),
                overworldSystem
        };
        getOverworldSystem().loadLevel(lvl, tdf);
        System.out.println("\t...Done.");
        // Install NAVs
        final NAVSystem navSystem = ((TVF3Game)tr.getGame()).getNavSystem();
        navSubObjects = rm.getNAVData(lvl.getNavigationFile())
                .getNavObjects();

        // First NAV object is the START marker; note the legacy X/Z axis swap.
        START s = (START) navSubObjects.get(0);
        Location3D l3d = s.getLocationOnMap();
        playerStartPosition[0] = TR.legacy2Modern(l3d.getZ());
        playerStartPosition[2] = TR.legacy2Modern(l3d.getX());
        final double HEIGHT_PADDING = 10000;
        // Keep the start point at least HEIGHT_PADDING above the terrain.
        playerStartPosition[1] = Math.max(HEIGHT_PADDING + getOverworldSystem().getAltitudeMap().heightAt(
                TR.legacy2Modern(l3d.getZ()),
                TR.legacy2Modern(l3d.getX())),TR.legacy2Modern(l3d.getY()));
        playerStartDirection = new ObjectDirection(s.getRoll(),
                s.getPitch(), s.getYaw());
        // ////// INITIAL HEADING
        player.setPosition(getPlayerStartPosition());
        player.setDirection(getPlayerStartDirection());
        player.setHeading(player.getHeading().negate());// Kludge to fix
                                                        // incorrect heading
        ///////// STATE
        final Propelled propelled = player.probeForBehavior(Propelled.class);
        propelled.setPropulsion(propelled.getMinPropulsion());

        installTunnels(tdf,progressStages[LoadingStages.tunnels.ordinal()]);
        // Create one NAVObjective per NAV sub-object, reporting progress.
        Factory f = new NAVObjective.Factory(tr);

        final LoadingProgressReporter[] navProgress = progressStages[LoadingStages.navs
                .ordinal()].generateSubReporters(navSubObjects.size());
        for (int i = 0; i < navSubObjects.size(); i++) {
            final NAVSubObject obj = navSubObjects.get(i);
            f.create(tr, obj, navs);
            navProgress[i].complete();
        }// end for(navSubObjects)
        navSystem.updateNAVState();
        player.resetVelocityRotMomentum();
        // Optional debug override of the start point via system properties.
        final String startX = System.getProperty("org.jtrfp.trcl.startX");
        final String startY = System.getProperty("org.jtrfp.trcl.startY");
        final String startZ = System.getProperty("org.jtrfp.trcl.startZ");
        final double[] playerPos = player.getPosition();
        if (startX != null && startY != null && startZ != null) {
            System.out.println("Using user-specified start point");
            final int sX = Integer.parseInt(startX);
            final int sY = Integer.parseInt(startY);
            final int sZ = Integer.parseInt(startZ);
            playerPos[0] = sX;
            playerPos[1] = sY;
            playerPos[2] = sZ;
            player.notifyPositionChange();
        }// end if(user start point)
        System.out.println("Start position set to " + player.getPosition()[0]+" "+player.getPosition()[1]+" "+player.getPosition()[2]);
        System.out.println("Setting sun vector");
        // Sun vector must be set on the GL thread; block until applied.
        final AbstractTriplet sunVector = lvl.getSunlightDirectionVector();
        tr.getThreadManager().submitToGL(new Callable<Void>() {
            @Override
            public Void call() throws Exception {
                tr.mainRenderer.get().setSunVector(
                        new Vector3D(sunVector.getX(), sunVector.getY(),
                                sunVector.getZ()).normalize());
                return null;
            }
        }).get();
        System.out.println("\t...Done.");
    } catch (Exception e) {
        e.printStackTrace();
    }
    // Debug aid: skip the first N NAV objectives when requested.
    if (System.getProperties().containsKey(
            "org.jtrfp.trcl.flow.Mission.skipNavs")) {
        try {
            final int skips = Integer.parseInt(System
                    .getProperty("org.jtrfp.trcl.flow.Mission.skipNavs"));
            System.out.println("Skipping " + skips + " navs.");
            for (int i = 0; i < skips; i++) {
                removeNAVObjective(currentNAVObjective());
            }// end for(skips)
        } catch (NumberFormatException e) {
            System.err
                    .println("Invalid format for property \"org.jtrfp.trcl.flow.Mission.skipNavs\". Must be integer.");
        }
    }// end if(containsKey)
    // Transition to gameplay mode.
    // Abort check
    synchronized (missionEnd) {
        if (missionEnd[0] != null)
            return missionEnd[0];
    }//end sync(missionEnd)
    // Start background music once; guarded so a concurrent start cannot double-play.
    final SoundSystem ss = Mission.this.tr.soundSystem.get();
    MusicPlaybackEvent evt;
    Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
            evt =ss
            .getMusicFactory()
            .create(new GPUResidentMOD(tr, tr
                    .getResourceManager().getMOD(
                            lvl.getBackgroundMusicFile())),
                    true));
    synchronized(Mission.this){
        if(bgMusic==null){
            bgMusic=evt;
            bgMusic.play();
        }
    }//end sync(Mission.this)
    ((TVF3Game)game).getUpfrontDisplay().removePersistentMessage();
    tr.getThreadManager().setPaused(false);
    if(showIntro){
        tr.setRunState(new Briefing(){});
        setMissionMode(new Mission.IntroMode());
        displayHandler.setDisplayMode(briefingMode);
        ((TVF3Game)game).getBriefingScreen().briefingSequence(lvl);//TODO: Convert to feature
    }
    // Apply the level's sky/lighting now that gameplay is about to start.
    setMissionMode(new Mission.AboveGroundMode());
    final SkySystem skySystem = getOverworldSystem().getSkySystem();
    tr.mainRenderer.get().getCamera().probeForBehavior(SkyCubeCloudModeUpdateBehavior.class).setEnable(true);
    renderer.getSkyCube().setSkyCubeGen(skySystem.getBelowCloudsSkyCubeGen());
    renderer.setAmbientLight(skySystem.getSuggestedAmbientLight());
    renderer.setSunColor(skySystem.getSuggestedSunColor());
    ((TVF3Game)game).getNavSystem() .activate();
    displayHandler.setDisplayMode(gameplayMode);

    ((TVF3Game)game).getPlayer()   .setActive(true);
    ((TVF3Game)tr.getGame()).setPaused(false);
    tr.setRunState(new PlayerActivity(){});
    //Wait for mission end; notifyMissionEnd() wakes us via missionEnd.notifyAll().
    synchronized(missionEnd){
        while(missionEnd[0]==null){try{missionEnd.wait();}
        catch(InterruptedException e){break;}}}
    //Completion summary
    tr.setRunState(new Briefing(){});
    if(missionEnd[0]!=null)
        if(!missionEnd[0].isAbort()){
            // Only show the summary for a real completion, not an abort.
            displayHandler.setDisplayMode(summaryMode);
            setMissionMode(new Mission.MissionSummaryMode());
            ((TVF3Game)game).getBriefingScreen().missionCompleteSummary(lvl,missionEnd[0]);
        }//end if(proper ending)
    bgMusic.stop();
    cleanup();
    return missionEnd[0];
    }//end sync
}// end go()
/**
 * @return the active NAV objective (head of the list), or null when none remain
 */
public NAVObjective currentNAVObjective() {
    return navs.isEmpty() ? null : navs.get(0);
}//end currentNAVObjective()
/**
 * Removes a completed objective.  When it was the last one the mission-end
 * sequence starts; otherwise the NAV display advances to the next objective.
 *
 * @param o the objective to retire
 */
public void removeNAVObjective(NAVObjective o) {
    navs.remove(o);
    if (navs.isEmpty()) {
        missionCompleteSequence();
    } else {
        ((TVF3Game) tr.getGame()).getNavSystem().updateNAVState();
    }
}// end removeNAVObjective(...)
/**
 * End-of-mission tally: kill counts plus the normalized fraction of tunnels
 * the player found.  Immutable except for the abort flag, which marks a
 * mission that was cancelled rather than completed.
 */
public static class Result {
    private final int airTargetsDestroyed;
    private final int groundTargetsDestroyed;
    private final int foliageDestroyed;
    private final double tunnelsFoundPctNorm;
    private boolean abort = false;

    public Result(int airTargetsDestroyed, int groundTargetsDestroyed, int foliageDestroyed, double tunnelsFoundPctNorm) {
        this.airTargetsDestroyed = airTargetsDestroyed;
        this.groundTargetsDestroyed = groundTargetsDestroyed;
        this.foliageDestroyed = foliageDestroyed;
        this.tunnelsFoundPctNorm = tunnelsFoundPctNorm;
    }//end constructor

    /** @return number of airborne targets destroyed */
    public int getAirTargetsDestroyed() {
        return airTargetsDestroyed;
    }

    /** @return number of ground targets destroyed */
    public int getGroundTargetsDestroyed() {
        return groundTargetsDestroyed;
    }

    /** @return number of foliage objects destroyed */
    public int getFoliageDestroyed() {
        return foliageDestroyed;
    }

    /** @return fraction of tunnels found, normalized to [0, 1] */
    public double getTunnelsFoundPctNorm() {
        return tunnelsFoundPctNorm;
    }

    /** @return true when the mission was aborted rather than completed */
    public boolean isAbort() {
        return abort;
    }

    /** @param abort whether this result represents an aborted mission */
    public void setAbort(boolean abort) {
        this.abort = abort;
    }
}// end Result
/**
 * @return the playerStartPosition as a live reference to the internal
 *         {x, y, z} array (callers see later updates; do not mutate)
 */
public double[] getPlayerStartPosition() {
    return playerStartPosition;
}
/**
 * @return the playerStartDirection parsed from the NAV START entry,
 *         or null before go() has loaded the level
 */
public ObjectDirection getPlayerStartDirection() {
    return playerStartDirection;
}
/**
 * Creates a Tunnel for each entry of the level's TDF file, reporting each
 * entrance/exit to the diagnostic reporter, and records the total tunnel
 * count used for the end-of-mission found-percentage.
 *
 * @param tdf      tunnel definition file for this level (may define no tunnels)
 * @param reporter loading-progress reporter; one sub-reporter per tunnel
 */
private void installTunnels(TDFFile tdf, LoadingProgressReporter reporter){
    // A null tunnel array simply means the level has no tunnels.
    // (The old code re-checked tuns != null after this, which was dead code.)
    TDFFile.Tunnel[] tuns = tdf.getTunnels();
    tuns = tuns == null ? new TDFFile.Tunnel[0] : tuns;
    final LoadingProgressReporter[] reporters = reporter
            .generateSubReporters(tuns.length);
    int tIndex = 0;
    // Build tunnels
    for (TDFFile.Tunnel tun : tuns) {
        tr
        .getReporter()
        .report("org.jtrfp.trcl.TunnelInstaller.tunnel."
                + tIndex + ".entrance", tun.getEntrance().toString());
        tr
        .getReporter()
        .report("org.jtrfp.trcl.TunnelInstaller.tunnel."
                + tIndex + ".exit", tun.getExit().toString());
        newTunnel(tun, reporters[tIndex]);
        tIndex++;
    }// end for(tuns)
    totalNumTunnels = tunnelsRemaining.size();
}//end installTunnels()
/**
 * Builds one Tunnel from its TDF definition and wires its entrance portal:
 * the portal at the entrance map square is pointed into the tunnel's start.
 * The tunnel is added to tunnelsRemaining (for the found-percentage) and
 * indexed by upper-cased LVL file name in the tunnels map.
 * Note the Z/X swap when converting legacy coordinates to map squares.
 *
 * @param tdfTun   the tunnel definition from the TDF file
 * @param reporter progress reporter for this tunnel's load
 * @return the newly created Tunnel
 * @throws NullPointerException if no portal was registered at the entrance square
 */
private Tunnel newTunnel(org.jtrfp.trcl.file.TDFFile.Tunnel tdfTun,
        LoadingProgressReporter reporter) {
    final Tunnel tunnel = new Tunnel(tr, tdfTun, reporter);
    tunnelsRemaining.add(tunnel);
    DirectionVector tunnelEntranceLegacyPos = tdfTun.getEntrance();
    final Point tunnelEntranceMapSquarePos = new Point(
            (int)(TR.legacy2MapSquare(tunnelEntranceLegacyPos.getZ())),
            (int)(TR.legacy2MapSquare(tunnelEntranceLegacyPos.getX())));
    addTunnelEntrance(tunnelEntranceMapSquarePos,tunnel);
    // Point the pre-registered portal at this square into the tunnel start.
    PortalExit portalExit = getTunnelEntrancePortal(tunnelEntranceMapSquarePos);
    if(portalExit!=null){
        portalExit.setHeading(Tunnel.TUNNEL_START_DIRECTION.getHeading());
        portalExit.setTop(Tunnel.TUNNEL_START_DIRECTION.getTop());
        portalExit.setPosition(Tunnel.TUNNEL_START_POS.toArray());
        portalExit.notifyPositionChange();
        portalExit.setRootGrid(tunnel);
    }else throw new NullPointerException("Null portal exit! "+tunnelEntranceMapSquarePos);

    DirectionVector tunnelExitLegacyPos = tdfTun.getExit();
    final Point tunnelExitMapSquarePos = new Point(
            (int)(TR.legacy2MapSquare(tunnelExitLegacyPos.getZ())),
            (int)(TR.legacy2MapSquare(tunnelExitLegacyPos.getX())));
    System.out.println("Tunnel exit at sector "+tunnelExitMapSquarePos);
    portalExit = getTunnelEntrancePortal(tunnelExitMapSquarePos);
    // Exit-portal wiring is intentionally disabled; kept for reference.
    /*if(portalExit!=null){
        portalExit.setHeading(tunnel.getExitObject().getHeading().negate());
        portalExit.setTop(tunnel.getExitObject().getTop());
        portalExit.setPosition(tunnel.getExitObject().getPosition());
        portalExit.notifyPositionChange();
        portalExit.setRootGrid(tunnel);
    }else System.err.println("Null exit.");*/
    tunnels.put(tdfTun.getTunnelLVLFile().toUpperCase(), tunnel);
    return tunnel;
}
/**
 * Looks up a tunnel by its LVL file name (case-insensitive; tunnels are
 * keyed upper-cased when installed).
 *
 * @param tunnelFileName the tunnel's LVL file name
 * @return the matching Tunnel, or null if unknown
 */
public Tunnel getTunnelByFileName(String tunnelFileName) {
    final String key = tunnelFileName.toUpperCase();
    return tunnels.get(key);
}
/**
 * Finds the tunnel entrance closest (euclidean) to the given point in
 * legacy units.  Note the X/Z swap when building the query vector: legacy
 * map coordinates are transposed relative to world space.
 *
 * @return the nearest entrance, or null when no tunnels exist
 */
public TunnelEntranceObject getNearestTunnelEntrance(double xInLegacyUnits,
        double yInLegacyUnits, double zInLegacyUnits) {
    final Vector3D queryPos = new Vector3D(
            TR.legacy2Modern(zInLegacyUnits),//Intentionally backwards
            TR.legacy2Modern(yInLegacyUnits),
            TR.legacy2Modern(xInLegacyUnits)
            );
    System.out.println("Requested entry pos="+queryPos);
    TunnelEntranceObject nearest = null;
    double bestDistance = Double.POSITIVE_INFINITY;
    for (TunnelEntranceObject candidate : tunnelMap.values()) {
        final Vector3D candidatePos = new Vector3D(candidate.getPosition());
        System.out.println("Found tunnel at "+candidatePos);
        final double dist = candidatePos.distance(queryPos);
        if (dist < bestDistance) {
            bestDistance = dist;
            nearest = candidate;
        }
    }// end for(tunnels)
    return nearest;
}// end getTunnelWhoseEntranceClosestTo(...)
/**
 * Starts the mission-complete sequence on a background thread: builds the
 * final Result from the tally counters and wakes go() via notifyMissionEnd.
 * The cinematic steps are not yet implemented (see TODOs).
 */
private void missionCompleteSequence() {
    new Thread() {
        @Override
        public void run() {
            // TODO: Behavior change: Camera XZ static, lag Y by ~16
            // squares, heading/top affix toward player
            // TODO: Turn off all player control behavior
            // TODO: Behavior change: Player turns upward, top rolls on
            // heading, speed at full throttle
            // TODO: Wait 3 seconds
            // TODO: Lightning shell on
            // TODO: Wait 1 second
            // TODO: Turbo forward
            // TODO: Wait 500ms
            // TODO: Jet thrust noise
            // TODO: Player invisible.
            System.out.println("MISSION COMPLETE.");
            // Found-percentage = 1 - remaining/total.
            notifyMissionEnd(
                    new Result(
                            airTargetsDestroyed,
                            groundTargetsDestroyed,
                            foliageDestroyed,
                            1.-(double)tunnelsRemaining.size()/(double)totalNumTunnels));
        }// end run()
    }.start();
}//end missionCompleteSequence()
/**
 * Starts the mission-failed sequence on a background thread.  A null
 * result posted to notifyMissionEnd signals player destruction to go().
 * The cinematic steps are not yet implemented (see TODOs).
 */
public void playerDestroyed() {
    new Thread() {
        @Override
        public void run() {
            // TODO Behavior change: Camera XYZ static, heading/top affix
            // toward player
            // TODO: Turn off all player control behavior
            // TODO Player behavior change: Slow spin along heading axis,
            // slow downward drift of heading
            // TODO: Add behavior: explode and destroy on impact with ground
            System.out.println("MISSION FAILED.");
            notifyMissionEnd(null);
        }// end run()
    }.start();
}// end playerDestroyed()
/**
 * Publishes the mission result and wakes every thread blocked in go()
 * waiting on missionEnd.  A null result means the player was destroyed.
 *
 * @param r the result to publish, or null for player death
 */
private void notifyMissionEnd(Result r){
    synchronized (missionEnd) {
        missionEnd[0] = r;
        missionEnd.notifyAll();
    }
}//end notifyMissionEnd()
/**
 * @return the live internal list of remaining NAV objectives
 *         (mutations are visible to the mission; do not modify)
 */
public List<NAVObjective> getRemainingNAVObjectives() {
    return navs;
}
/**
 * @return the navSubObjects loaded from the level's NAV file,
 *         or null before go() has loaded the level
 */
public List<NAVSubObject> getNavSubObjects() {
    return navSubObjects;
}
/**
 * @param navSubObjects
 *            the navSubObjects to set (replaces the list parsed from the
 *            NAV file; the list is stored by reference, not copied)
 */
public void setNavSubObjects(List<NAVSubObject> navSubObjects) {
    this.navSubObjects = navSubObjects;
}
/** Public entry point to force mission completion; delegates to the sequence. */
public void missionComplete() {
    missionCompleteSequence();
}
/** @return the overworld system, or null before go() has constructed it. */
public OverworldSystem getOverworldSystem() {
    return overworldSystem;
}
/**
 * Tallies one destroyed air target for the end-of-mission summary.
 *
 * @return this, for call chaining
 */
public Mission notifyAirTargetDestroyed(){
    ++airTargetsDestroyed;
    return this;
}
/**
 * Tallies one destroyed ground target for the end-of-mission summary.
 *
 * @return this, for call chaining
 */
public Mission notifyGroundTargetDestroyed(){
    ++groundTargetsDestroyed;
    return this;
}
/**
 * Marks a tunnel as found; it no longer counts against the found-percentage
 * in the mission Result.
 *
 * @param tun the tunnel the player located
 * @return this, for call chaining
 */
public Mission notifyTunnelFound(Tunnel tun){
    tunnelsRemaining.remove(tun);
    return this;
}
/**
 * Tallies one destroyed foliage object for the end-of-mission summary.
 *
 * @return this, for call chaining
 */
public Mission notifyFoliageDestroyed(){
    ++foliageDestroyed;
    return this;
}
/**
 * Enters boss-fight mode and swaps the background music for the boss track
 * on a thread-pool task (music creation may block on resource loading).
 * The swap is synchronized on this Mission so it cannot race the initial
 * music start in go() or a concurrent exitBossMode().
 *
 * @param bossMusicFile resource key of the boss music MOD
 */
public void enterBossMode(final String bossMusicFile){
    setBossFight(true);
    tr.getThreadManager().submitToThreadPool(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            MusicPlaybackEvent evt;
            final SoundSystem ss = Mission.this.tr.soundSystem.get();
            Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
                    evt =ss
                    .getMusicFactory()
                    .create(tr.getResourceManager().gpuResidentMODs.get(bossMusicFile),
                            true));
            synchronized(Mission.this){
                // Start the boss track, stop the old one, then hand over the reference.
                evt.play();
                if(bgMusic!=null)
                    bgMusic.stop();
                bgMusic=evt;
            }
            return null;
        }// end call()
    });
}//end enterBossMode()
/**
 * Leaves boss-fight mode and restores the level's normal background music
 * on a thread-pool task.  The swap is synchronized on this Mission so it
 * cannot race enterBossMode() or the initial music start in go().
 */
public void exitBossMode(){
    setBossFight(false);
    tr.getThreadManager().submitToThreadPool(new Callable<Void>() {
        @Override
        public Void call() throws Exception {
            MusicPlaybackEvent evt;
            final SoundSystem ss = Mission.this.tr.soundSystem.get();
            Mission.this.tr.soundSystem.get().enqueuePlaybackEvent(
                    evt =ss
                    .getMusicFactory()
                    .create(tr.getResourceManager().gpuResidentMODs.get(lvl.getBackgroundMusicFile()),
                            true));
            synchronized(Mission.this){
                evt.play();
                // BUGFIX: guard against a null bgMusic (enterBossMode already
                // guards this case); previously an unchecked stop() could NPE.
                if(bgMusic!=null)
                    bgMusic.stop();
                bgMusic=evt;
            }
            return null;
        }// end call()
    });
}//end exitBossMode()
/**
 * Aborts the mission in progress: builds a Result from the current tallies,
 * flags it as aborted, signals the mission thread to end, then waits on the
 * mission lock before tearing down mission-scoped state.
 */
public void abort() {
    final Result result = new Result(
        airTargetsDestroyed,
        groundTargetsDestroyed,
        foliageDestroyed,
        // Fraction of tunnels the player completed (found tunnels are
        // removed from tunnelsRemaining as the mission progresses).
        1.-(double)tunnelsRemaining.size()/(double)totalNumTunnels);
    result.setAbort(true);
    notifyMissionEnd(result);
    //Wait for mission to end
    synchronized(missionLock){//Don't execute while mission is in progress.
        cleanup();
    }//end sync{}
}//end abort()
/**
 * Tears down mission-scoped state: blanks the display, detaches the
 * secondary renderer's camera from the world grid, and destroys every live
 * projectile so none leak into the next mission.
 */
private void cleanup() {
    displayHandler.setDisplayMode(emptyMode);
    tr.secondaryRenderer.get().getCamera().setRootGrid(null);
    // Remove projectile factories
    for(ProjectileFactory pf:tr.getResourceManager().getProjectileFactories())
        for(Projectile projectile:pf.getProjectiles())
            projectile.destroy();
}
/**
 * Find a tunnel at the given map square, if any.
 * @param mapSquareXZ Position in cells, not world coords.
 * @return The Tunnel at this map square, or null if none here.
 * @since Jan 13, 2015
 */
public TunnelEntranceObject getTunnelEntranceObject(Point mapSquareXZ){
    // FIX: removed leftover debug output which printed the query position and
    // iterated the entire tunnel map to stdout on every single lookup.
    return tunnelMap.get(pointToHash(mapSquareXZ));
}
/**
 * Registers the PortalExit used when the player enters the tunnel located at
 * the given map square. Thread-safe via synchronization on tunnelPortals.
 * @param mapSquareXZ tunnel entrance position in cells, not world coords
 * @param exit portal exit to associate with that square
 */
public void registerTunnelEntrancePortal(Point mapSquareXZ, PortalExit exit){
    synchronized(tunnelPortals){
        tunnelPortals.put(pointToHash(mapSquareXZ),exit);}
}
/**
 * Looks up the PortalExit registered for the tunnel entrance at the given
 * map square. Thread-safe via synchronization on tunnelPortals.
 * @param mapSquareXZ tunnel entrance position in cells, not world coords
 * @return the registered portal exit, or null if none was registered
 */
PortalExit getTunnelEntrancePortal(Point mapSquareXZ){
    synchronized(tunnelPortals){
        return tunnelPortals.get(pointToHash(mapSquareXZ));}
}
/**
 * Creates a TunnelEntranceObject for the given tunnel, adds it to the
 * overworld, and indexes it by map square for later lookup via
 * getTunnelEntranceObject(Point).
 * @param mapSquareXZ entrance position in cells, not world coords
 * @param tunnel the tunnel reachable from this entrance
 */
public void addTunnelEntrance(Point mapSquareXZ, Tunnel tunnel){
    TunnelEntranceObject teo;
    overworldSystem.add(teo = new TunnelEntranceObject(tr,tunnel));
    tunnelMap.put(pointToHash(mapSquareXZ),teo);
}
/**
 * Packs a map-square coordinate into a single int key for the tunnel maps.
 * The X cell index occupies the low 16 bits' worth of range and the Y cell
 * index is scaled by 65536, so squares hash uniquely for in-range maps.
 * @param point cell coordinates (X, Y) to encode
 * @return combined integer key
 */
private int pointToHash(Point point){
    final int cellX = (int) point.getX();
    final int cellY = (int) point.getY();
    return cellX + cellY * 65536;
}
/**
 * Transitions the game from the overworld into the given tunnel.
 * This is a strictly ordered state swap: switch the render grids, retune the
 * skycubes, disable overworld-only behaviors on the player, snap the player
 * to the tunnel camera position, and finally re-activate the player.
 * Synchronized so only one tunnel transition can run at a time.
 * @param tunnel the tunnel being entered
 */
public synchronized void enterTunnel(final Tunnel tunnel) {
    System.out.println("Entering tunnel "+tunnel);
    final Game game = ((TVF3Game)tr.getGame());
    final OverworldSystem overworldSystem = ((TVF3Game)game).getCurrentMission().getOverworldSystem();
    currentTunnel = tunnel;
    ((TVF3Game)game).getCurrentMission().notifyTunnelFound(tunnel);
    setMissionMode(new TunnelMode());
    // Swap render branches: tunnel in, overworld out.
    tr.getDefaultGrid().nonBlockingAddBranch(tunnel);
    tr.getDefaultGrid().blockingRemoveBranch(overworldSystem);
    //Move player to tunnel
    tr.mainRenderer.get().getSkyCube().setSkyCubeGen(Tunnel.TUNNEL_SKYCUBE_GEN);
    //Ensure chamber mode is off
    overworldSystem.setChamberMode(false);
    overworldSystem.setTunnelMode(true);
    //Update debug data
    tr.getReporter().report("org.jtrfp.Tunnel.isInTunnel?", "true");
    // Freeze looping-position wrapping on all live projectiles while inside
    // the tunnel's coordinate space.
    final ProjectileFactory [] pfs = tr.getResourceManager().getProjectileFactories();
    for(ProjectileFactory pf:pfs){
        Projectile [] projectiles = pf.getProjectiles();
        for(Projectile proj:projectiles){
            ((WorldObject)proj).
                probeForBehavior(LoopingPositionBehavior.class).
                setEnable(false);
        }//end for(projectiles)
    }//end for(projectileFactories)
    // Deactivate the player while rewiring its behaviors for tunnel flight.
    final Player player = ((TVF3Game)tr.getGame()).getPlayer();
    player.setActive(false);
    player.resetVelocityRotMomentum();
    player.probeForBehavior(CollidesWithTunnelWalls.class).setEnable(true);
    player.probeForBehavior(MovesByVelocity.class) .setVelocity(Vector3D.ZERO);
    player.probeForBehavior(LoopingPositionBehavior.class).setEnable(false);
    player.probeForBehavior(HeadingXAlwaysPositiveBehavior.class).setEnable(true);
    player.probeForBehavior(CollidesWithTerrain.class) .setEnable(false);
    tunnel.dispatchTunnelEntryNotifications();
    // Snap the player to the secondary camera's pose at the tunnel entrance.
    final Camera secondaryCam = tr.secondaryRenderer.get().getCamera();
    player.setPosition(secondaryCam.getPosition());
    player.setHeading (secondaryCam.getHeading());
    player.setTop     (secondaryCam.getTop());
    player.notifyPositionChange();
    //Move the secondary cam to the overworld.
    overworldSystem.setChamberMode(tunnel.getExitObject().isMirrorTerrain());
    secondaryCam.setRootGrid(overworldSystem);
    //Set the skycube appropriately
    tr.secondaryRenderer.get().getSkyCube().setSkyCubeGen(((TVF3Game)tr.getGame()).
        getCurrentMission().
        getOverworldSystem().
        getSkySystem().
        getBelowCloudsSkyCubeGen());
    tr.setRunState(new TunnelState(){});
    player.setActive(true);
}//end enterTunnel()
/**
 * Registers a listener for all property changes on this Mission.
 * @param listener the listener to add
 * @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.beans.PropertyChangeListener)
 */
public void addPropertyChangeListener(PropertyChangeListener listener) {
    pcs.addPropertyChangeListener(listener);
}
/**
 * Registers a listener for changes to one named property of this Mission.
 * @param propertyName the property to listen to (e.g. MISSION_MODE)
 * @param listener the listener to add
 * @see java.beans.PropertyChangeSupport#addPropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener)
 */
public void addPropertyChangeListener(String propertyName,
    PropertyChangeListener listener) {
    pcs.addPropertyChangeListener(propertyName, listener);
}
/**
 * @return all listeners registered on this Mission
 * @see java.beans.PropertyChangeSupport#getPropertyChangeListeners()
 */
public PropertyChangeListener[] getPropertyChangeListeners() {
    return pcs.getPropertyChangeListeners();
}
/**
 * @param propertyName the property whose listeners are requested
 * @return the listeners registered for that named property
 * @see java.beans.PropertyChangeSupport#getPropertyChangeListeners(java.lang.String)
 */
public PropertyChangeListener[] getPropertyChangeListeners(
    String propertyName) {
    return pcs.getPropertyChangeListeners(propertyName);
}
/**
 * @param propertyName the property to query
 * @return true if any listener is registered for the named property
 * @see java.beans.PropertyChangeSupport#hasListeners(java.lang.String)
 */
public boolean hasListeners(String propertyName) {
    return pcs.hasListeners(propertyName);
}
/**
 * Unregisters a listener previously added for all properties.
 * @param listener the listener to remove
 * @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.beans.PropertyChangeListener)
 */
public void removePropertyChangeListener(PropertyChangeListener listener) {
    pcs.removePropertyChangeListener(listener);
}
/**
 * Unregisters a listener previously added for one named property.
 * @param propertyName the property the listener was registered under
 * @param listener the listener to remove
 * @see java.beans.PropertyChangeSupport#removePropertyChangeListener(java.lang.String, java.beans.PropertyChangeListener)
 */
public void removePropertyChangeListener(String propertyName,
    PropertyChangeListener listener) {
    pcs.removePropertyChangeListener(propertyName, listener);
}
/** Marker type describing which phase this Mission is currently in. */
public static interface MissionMode{}
/** Mission assets are still loading. */
public static class LoadingMode implements MissionMode{}
/** Pre-mission briefing phase. */
public static class BriefingMode implements MissionMode{}
/** Intro portion of the briefing. */
public static class IntroMode extends BriefingMode{}
/** Enemy-introduction slide of the intro. */
public static class EnemyIntroMode extends IntroMode{}
/** Planet-introduction slide of the intro. */
public static class PlanetIntroMode extends IntroMode{}
/** Post-mission summary screen. */
public static class MissionSummaryMode extends BriefingMode{}
/** Player is actively playing. */
public static class GameplayMode implements MissionMode{}
/** Flying inside a tunnel. */
public static class TunnelMode extends GameplayMode{}
/** Inside a chamber (mirrored-terrain room). */
public static class ChamberMode extends GameplayMode{}
/** Flying over the open overworld terrain. */
public static class AboveGroundMode extends GameplayMode{}
/**
 * @return the missionMode currently in effect (see MissionMode subclasses)
 */
public MissionMode getMissionMode() {
    return missionMode;
}
/**
 * @param missionMode the missionMode to set
 *        Fires the MISSION_MODE property change before the field is
 *        assigned, so listeners see the old value via getMissionMode().
 */
public void setMissionMode(MissionMode missionMode) {
    pcs.firePropertyChange(MISSION_MODE, this.missionMode, missionMode);
    this.missionMode = missionMode;
}
/**
 * @return the bossFight flag (true while a boss encounter is active)
 */
public boolean isBossFight() {
    return bossFight;
}
/**
 * @param bossFight the bossFight flag to set; fires a "bossFight" property
 *        change before the field is assigned
 */
public void setBossFight(boolean bossFight) {
    pcs.firePropertyChange("bossFight", this.bossFight, bossFight);
    this.bossFight = bossFight;
}
/**
 * Toggles the top-down satellite map view.
 * Turning it on pauses the simulation, detaches the NAV and HUD overlays,
 * and repositions the main camera high above the player looking straight
 * down; turning it off restores all of that. Only legal while flying above
 * ground and not paused.
 * @param satelliteView true to enter satellite view, false to leave it
 * @throws IllegalArgumentException if the mission mode or pause state
 *         forbids the transition
 */
public void setSatelliteView(boolean satelliteView) {
    if(!(getMissionMode() instanceof AboveGroundMode)&&satelliteView)
        throw new IllegalArgumentException("Cannot activate satellite view while mission mode is "+getMissionMode().getClass().getSimpleName());
    if(satelliteView && ((TVF3Game)tr.getGame()).isPaused())
        throw new IllegalArgumentException("Cannot activate satellite view while paused.");
    // Fired unconditionally, even when the value does not change.
    pcs.firePropertyChange(SATELLITE_VIEW, this.satelliteView, satelliteView);
    if(satelliteView!=this.satelliteView){
        final Game game = ((TVF3Game)tr.getGame());
        final Camera cam = tr.mainRenderer.get().getCamera();
        if(satelliteView){//Switched on
            tr.getThreadManager().setPaused(true);
            // Grid changes must run on the relevance executor thread.
            World.relevanceExecutor.submit(new Runnable(){
                @Override
                public void run() {
                    tr.getDefaultGrid().removeBranch(((TVF3Game)game).getNavSystem());
                    tr.getDefaultGrid().removeBranch(((TVF3Game)game).getHUDSystem());
                }});
            cam.setFogEnabled(false);
            // Detach the camera from the player so it can be placed manually.
            cam.probeForBehavior(MatchPosition.class).setEnable(false);
            cam.probeForBehavior(MatchDirection.class).setEnable(false);
            final Vector3D pPos = new Vector3D(((TVF3Game)game).getPlayer().getPosition());
            final Vector3D pHeading = ((TVF3Game)tr.getGame()).getPlayer().getHeading();
            cam.setPosition(new Vector3D(pPos.getX(),TR.visibilityDiameterInMapSquares*TR.mapSquareSize*.65,pPos.getZ()));
            cam.setHeading(Vector3D.MINUS_J);
            // Tiny Y component avoids a degenerate (parallel) top vector
            // when looking straight down.
            cam.setTop(new Vector3D(pHeading.getX(),.0000000001,pHeading.getZ()).normalize());
            ((TVF3Game)tr.getGame()).getSatDashboard().setVisible(true);
        }else{//Switched off
            tr.getThreadManager().setPaused(false);
            World.relevanceExecutor.submit(new Runnable(){
                @Override
                public void run() {
                    ((TVF3Game)tr.getGame()).getNavSystem().activate();
                    tr.getDefaultGrid().addBranch(((TVF3Game)game).getNavSystem());
                    tr.getDefaultGrid().addBranch(((TVF3Game)game).getHUDSystem());
                }});
            cam.setFogEnabled(true);
            // Re-attach the camera to the player.
            cam.probeForBehavior(MatchPosition.class).setEnable(true);
            cam.probeForBehavior(MatchDirection.class).setEnable(true);
            ((TVF3Game)tr.getGame()).getSatDashboard().setVisible(false);
        }//end !satelliteView
    }//end if(change)
    this.satelliteView=satelliteView;
}
/**
 * @return the satelliteView flag (true while the top-down map view is active)
 */
public boolean isSatelliteView() {
    // FIX: removed leftover debug println — this getter is queried
    // repeatedly and was spamming stdout on every call.
    return satelliteView;
}
/**
 * @return the tunnel the player is currently inside, or null when the
 *         mission mode is anything other than TunnelMode
 */
public Tunnel getCurrentTunnel() {
    if(!(getMissionMode() instanceof TunnelMode))return null;
    return currentTunnel;
}
/** @return the Game this Mission belongs to. */
public Game getGame() {
    return game;
}
/** Releases this Mission's Feature registrations via the Features registry. */
public void destruct() {
    Features.destruct(this);
}
}// end Mission
|
package org.kohsuke.github;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Date;
/**
 * A notification thread from the GitHub Notifications API.
 * Instances are bound from JSON reflectively, which is why several fields
 * use the API's snake_case names (last_read_at, subscription_url) — do not
 * rename them.
 */
public class GHThread extends GHObject {
    private GitHub root;
    private GHRepository repository;
    private Subject subject;
    private String reason;
    private boolean unread;
    private String last_read_at;
    private String url,subscription_url;

    // JSON-bound descriptor of what the notification refers to; "type"
    // discriminates between Issue, PullRequest and Commit subjects.
    static class Subject {
        String title;
        String url;
        String latest_comment_url;
        String type;
    }

    private GHThread() {// no external construction allowed
    }

    /**
     * Returns null if the entire thread has never been read.
     */
    public Date getLastReadAt() {
        return GitHub.parseDate(last_read_at);
    }

    /** @return the reason this notification was delivered, as reported by GitHub. */
    public String getReason() {
        return reason;
    }

    /** @return the repository this notification belongs to. */
    public GHRepository getRepository() {
        return repository;
    }

    // TODO: how to expose the subject?

    /** @return true if this thread has been read (inverse of the API's "unread" flag). */
    public boolean isRead() {
        return !unread;
    }

    /** @return the title of the notification subject. */
    public String getTitle() {
        return subject.title;
    }

    /** @return the subject type: "Issue", "PullRequest" or "Commit". */
    public String getType() {
        return subject.type;
    }

    /**
     * Resolves the issue this notification points to.
     * The issue number is parsed from the tail of the subject URL.
     * @throws IllegalStateException if the subject is not an Issue
     */
    public GHIssue getBoundIssue() throws IOException {
        if (!"Issue".equals(subject.type))
            throw new IllegalStateException("Notification doesn't point to Issue");
        return repository.getIssue(
            Integer.parseInt(subject.url.substring(subject.url.lastIndexOf('/') + 1)));
    }

    /**
     * Resolves the pull request this notification points to.
     * @throws IllegalStateException if the subject is not a PullRequest
     */
    public GHPullRequest getBoundPullRequest() throws IOException {
        if (!"PullRequest".equals(subject.type))
            throw new IllegalStateException("Notification doesn't point to PullRequest");
        return repository.getPullRequest(
            Integer.parseInt(subject.url.substring(subject.url.lastIndexOf('/') + 1)));
    }

    /**
     * Resolves the commit this notification points to (SHA taken from the
     * tail of the subject URL).
     * @throws IllegalStateException if the subject is not a Commit
     */
    public GHCommit getBoundCommit() throws IOException {
        if (!"Commit".equals(subject.type))
            throw new IllegalStateException("Notification doesn't point to Commit");
        return repository.getCommit(subject.url.substring(subject.url.lastIndexOf('/') + 1));
    }

    // Wires the deserialized object graph back to its GitHub connection.
    /*package*/ GHThread wrap(GitHub root) {
        this.root = root;
        if (this.repository!=null)
            this.repository.wrap(root);
        return this;
    }

    /**
     * Marks this thread as read.
     */
    public void markAsRead() throws IOException {
        new Requester(root).method("PATCH").to(url);
    }

    /**
     * Subscribes to this conversation to get notifications.
     */
    public GHSubscription subscribe(boolean subscribed, boolean ignored) throws IOException {
        return new Requester(root)
            .with("subscribed", subscribed)
            .with("ignored", ignored)
            .method("PUT").to(subscription_url, GHSubscription.class).wrapUp(root);
    }

    /**
     * Returns the current subscription for this thread.
     *
     * @return null if no subscription exists.
     */
    public GHSubscription getSubscription() throws IOException {
        try {
            return new Requester(root).to(subscription_url, GHSubscription.class).wrapUp(root);
        } catch (FileNotFoundException e) {
            // The API signals "no subscription" with a 404, surfaced here
            // as FileNotFoundException.
            return null;
        }
    }
}
|
package org.pcollections;
import java.io.Serializable;
import java.util.AbstractSequentialList;
import java.util.Collection;
import java.util.Iterator;
import java.util.ListIterator;
/**
* A simple persistent stack of non-null values.
*
* <p>This implementation is thread-safe (assuming Java's AbstractSequentialList is thread-safe),
* although its iterators may not be.
*
* @author harold
* @param <E>
*/
public final class ConsPStack<E> extends AbstractSequentialList<E>
implements PStack<E>, Serializable {
private static final long serialVersionUID = 1L;
//// STATIC FACTORY METHODS ////
private static final ConsPStack<Object> EMPTY = new ConsPStack<Object>();
/**
* @param <E>
* @return an empty stack
*/
@SuppressWarnings("unchecked")
public static <E> ConsPStack<E> empty() {
return (ConsPStack<E>) EMPTY;
}
/**
* @param <E>
* @param e
* @return empty().plus(e)
*/
public static <E> ConsPStack<E> singleton(final E e) {
return ConsPStack.<E>empty().plus(e);
}
/**
* @param <E>
* @param list
* @return a stack consisting of the elements of list in the order of list.iterator()
*/
@SuppressWarnings("unchecked")
public static <E> ConsPStack<E> from(final Collection<? extends E> list) {
if (list instanceof ConsPStack)
return (ConsPStack<E>) list; // (actually we only know it's ConsPStack<? extends E>)
// but that's good enough for an immutable
// (i.e. we can't mess someone else up by adding the wrong type to it)
return ConsPStack.<E>from(list.iterator());
}
private static <E> ConsPStack<E> from(final Iterator<? extends E> i) {
if (!i.hasNext()) return empty();
E e = i.next();
return ConsPStack.<E>from(i).plus(e);
}
//// PRIVATE CONSTRUCTORS ////
private final E first;
private final ConsPStack<E> rest;
private final int size;
// not externally instantiable (or subclassable):
private ConsPStack() { // EMPTY constructor
if (EMPTY != null) throw new RuntimeException("empty constructor should only be used once");
size = 0;
first = null;
rest = null;
}
private ConsPStack(final E first, final ConsPStack<E> rest) {
this.first = first;
this.rest = rest;
size = 1 + rest.size;
}
//// REQUIRED METHODS FROM AbstractSequentialList ////
@Override
public int size() {
return size;
}
@Override
public ListIterator<E> listIterator(final int index) {
if (index < 0 || index > size) throw new IndexOutOfBoundsException();
return new ListIterator<E>() {
int i = index;
ConsPStack<E> next = subList(index);
public boolean hasNext() {
return next.size > 0;
}
public boolean hasPrevious() {
return i > 0;
}
public int nextIndex() {
return index;
}
public int previousIndex() {
return index - 1;
}
public E next() {
E e = next.first;
next = next.rest;
return e;
}
public E previous() {
System.err.println("ConsPStack.listIterator().previous() is inefficient, don't use it!");
next = subList(index - 1); // go from beginning...
return next.first;
}
public void add(final E o) {
throw new UnsupportedOperationException();
}
public void remove() {
throw new UnsupportedOperationException();
}
public void set(final E o) {
throw new UnsupportedOperationException();
}
};
}
//// OVERRIDDEN METHODS FROM AbstractSequentialList ////
@Override
public ConsPStack<E> subList(final int start, final int end) {
if (start < 0 || end > size || start > end) throw new IndexOutOfBoundsException();
if (start == end) return empty();
if (start > 0) return subList(start).subList(0, end - start); // remove from beginning
if (end == size) return this;
// remove from end (by popping off until end, and then pushing back on)
ConsPStack<E> reversed = empty();
for (final E e : this) {
if (reversed.size == end) break;
reversed = reversed.plus(e);
}
return this.<E>empty().plusAll(reversed); // plusAll reverses again
}
//// IMPLEMENTED METHODS OF PStack ////
public ConsPStack<E> plus(final E e) {
return new ConsPStack<E>(e, this);
}
public ConsPStack<E> plusAll(final Collection<? extends E> list) {
ConsPStack<E> result = this;
for (E e : list) result = result.plus(e);
return result;
}
public ConsPStack<E> plus(final int i, final E e) {
if (i < 0 || i > size) throw new IndexOutOfBoundsException();
if (i == 0) return plus(e); // insert at beginning
return new ConsPStack<E>(first, rest.plus(i - 1, e));
}
public ConsPStack<E> plusAll(final int i, final Collection<? extends E> list) {
// TODO inefficient if list.isEmpty()
if (i < 0 || i > size) throw new IndexOutOfBoundsException();
if (i == 0) return plusAll(list);
return new ConsPStack<E>(first, rest.plusAll(i - 1, list));
}
public ConsPStack<E> minus(final Object e) {
if (size == 0) return this;
if (first.equals(e)) return rest; // found it. don't recurse (only remove one)
// otherwise keep looking:
ConsPStack<E> newRest = rest.minus(e);
if (newRest == rest) return this;
return new ConsPStack<E>(first, newRest);
}
public ConsPStack<E> minus(final int i) {
if (i < 0 || i >= size) throw new IndexOutOfBoundsException("Index: " + i + "; size: " + size);
else if (i == 0) return rest;
else return new ConsPStack<E>(first, rest.minus(i - 1));
}
public ConsPStack<E> minusAll(final Collection<?> list) {
if (size == 0) return this;
if (list.contains(first))
return rest.minusAll(list); // get rid of current element. recursively delete all
// either way keep looking:
ConsPStack<E> newRest = rest.minusAll(list);
if (newRest == rest) return this;
return new ConsPStack<E>(first, newRest);
}
public ConsPStack<E> with(final int i, final E e) {
if (i < 0 || i >= size) throw new IndexOutOfBoundsException();
if (i == 0) {
if (first.equals(e)) return this;
return new ConsPStack<E>(e, rest);
}
ConsPStack<E> newRest = rest.with(i - 1, e);
if (newRest == rest) return this;
return new ConsPStack<E>(first, newRest);
}
public ConsPStack<E> subList(int start) {
if (start < 0 || start > size) throw new IndexOutOfBoundsException();
ConsPStack<E> s = this;
while (start > 0) {
s = s.rest;
start
}
return s;
}
}
|
package org.testng.annotations;
import static java.lang.annotation.ElementType.CONSTRUCTOR;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.TYPE;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Mark a class or a method as part of the test.
 *
 * @author Cedric Beust, Apr 26, 2004
 */
@Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
@Target({METHOD, TYPE, CONSTRUCTOR})
public @interface Test {
  /**
   * The list of groups this class/method belongs to.
   */
  public String[] groups() default {};

  /**
   * Whether methods on this class/method are enabled.
   */
  public boolean enabled() default true;

  /**
   * The list of variables used to fill the parameters of this method.
   * These variables must be defined in the property file.
   *
   * @deprecated Use @Parameters
   */
  @Deprecated
  public String[] parameters() default {};

  /**
   * The list of groups this method depends on. Every method
   * member of one of these groups is guaranteed to have been
   * invoked before this method. Furthermore, if any of these
   * methods was not a SUCCESS, this test method will not be
   * run and will be flagged as a SKIP.
   */
  public String[] dependsOnGroups() default {};

  /**
   * The list of methods this method depends on. There is no guarantee
   * on the order on which the methods depended upon will be run, but you
   * are guaranteed that all these methods will be run before the test method
   * that contains this annotation is run. Furthermore, if any of these
   * methods was not a SUCCESS, this test method will not be
   * run and will be flagged as a SKIP.
   *
   * If some of these methods have been overloaded, all the overloaded
   * versions will be run.
   */
  public String[] dependsOnMethods() default {};

  /**
   * The maximum number of milliseconds this test should take.
   * If it hasn't returned after this time, it will be marked as a FAIL.
   */
  public long timeOut() default 0;

  /**
   * The maximum number of milliseconds that the total number of invocations on this test
   * method should take. This annotation will be ignored if the attribute invocationCount
   * is not specified on this method.
   * If it hasn't returned after this time, it will be marked as a FAIL.
   */
  public long invocationTimeOut() default 0;

  /**
   * The number of times this method should be invoked.
   */
  public int invocationCount() default 1;

  /**
   * The size of the thread pool for this method. The method will be invoked
   * from multiple threads as specified by invocationCount.
   * Note: this attribute is ignored if invocationCount is not specified
   */
  public int threadPoolSize() default 0;

  /**
   * The percentage of success expected from this method.
   */
  public int successPercentage() default 100;

  /**
   * The name of the data provider for this test method.
   * @see org.testng.annotations.DataProvider
   */
  public String dataProvider() default "";

  /**
   * The class where to look for the data provider. If not
   * specified, the dataprovider will be looked on the class
   * of the current test method or one of its super classes.
   * If this attribute is specified, the data provider method
   * needs to be static on the specified class.
   */
  public Class<?> dataProviderClass() default Object.class;

  /**
   * If set to true, this test method will always be run even if it depends
   * on a method that failed. This attribute will be ignored if this test
   * doesn't depend on any method or group.
   */
  public boolean alwaysRun() default false;

  /**
   * The description for this method. The string used will appear in the
   * HTML report and also on standard output if verbose >= 2.
   */
  public String description() default "";

  /**
   * The list of exceptions that a test method is expected to throw. If no
   * exception or a different than one on this list is thrown, this test will be
   * marked a failure.
   */
  public Class[] expectedExceptions() default {};

  /**
   * If expectedExceptions was specified, its message must match the regular expression
   * specified in this attribute.
   */
  public String expectedExceptionsMessageRegExp() default ".*";

  /**
   * The name of the suite this test class should be placed in. This
   * attribute is ignored if @Test is not at the class level.
   */
  public String suiteName() default "";

  /**
   * The name of the test this test class should be placed in. This
   * attribute is ignored if @Test is not at the class level.
   */
  public String testName() default "";

  /**
   * @deprecated Use singleThreaded
   */
  // FIX: the javadoc declared this deprecated, but the @Deprecated
  // annotation itself was missing (parameters() above has both).
  @Deprecated
  public boolean sequential() default false;

  /**
   * If set to true, all the methods on this test class are guaranteed to run
   * in the same thread, even if the tests are currently being run with parallel="true".
   *
   * This attribute can only be used at the class level and will be ignored
   * if used at the method level.
   */
  public boolean singleThreaded() default false;

  /**
   * The name of the class that should be called to test if the test
   * should be retried.
   * @return String The name of the class that will test if a test method
   * should be retried.
   */
  public Class retryAnalyzer() default Class.class;

  /**
   * If true and invocationCount is specified with a value > 1,
   * then all invocations after a failure will be marked as a SKIP
   * instead of a FAIL.
   */
  public boolean skipFailedInvocations() default false;

  /**
   * If set to true, this test will run even if the methods
   * it depends on are missing or excluded.
   */
  public boolean ignoreMissingDependencies() default false;

  /**
   * The scheduling priority. Lower priorities will be scheduled first.
   */
  int priority() default 0;
}
|
package tigase.server;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.lang.management.OperatingSystemMXBean;
import java.lang.management.ThreadMXBean;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.TreeMap;
import java.util.Timer;
import java.util.TimerTask;
import java.util.LinkedHashSet;
import java.util.concurrent.ConcurrentSkipListMap;
import java.util.logging.Logger;
import java.util.logging.Level;
import tigase.xml.Element;
import tigase.util.JIDUtils;
import tigase.util.UpdatesChecker;
import tigase.xmpp.Authorization;
import tigase.xmpp.StanzaType;
import tigase.xmpp.PacketErrorTypeException;
import tigase.disco.XMPPService;
import tigase.disco.ServiceEntity;
import tigase.disco.ServiceIdentity;
import tigase.stats.StatRecord;
import static tigase.server.MessageRouterConfig.*;
/**
* Class MessageRouter
*
*
* Created: Tue Nov 22 07:07:11 2005
*
* @author <a href="mailto:artur.hefczyc@tigase.org">Artur Hefczyc</a>
* @version $Rev$
*/
public class MessageRouter extends AbstractMessageReceiver {
// implements XMPPService {
// XMPP Service Discovery namespaces (XEP-0030).
public static final String INFO_XMLNS =
    "http://jabber.org/protocol/disco#info";
public static final String ITEMS_XMLNS =
    "http://jabber.org/protocol/disco#items";
private static final Logger log =
    Logger.getLogger("tigase.server.MessageRouter");
//private static final long startupTime = System.currentTimeMillis();
// private Set<String> localAddresses = new CopyOnWriteArraySet<String>();
// Service-discovery identity presented by this router.
private String disco_name = DISCO_NAME_PROP_VAL;
private boolean disco_show_version = DISCO_SHOW_VERSION_PROP_VAL;
private ComponentRegistrator config = null;
private ServiceEntity serviceEntity = null;
private UpdatesChecker updates_checker = null;
// Component registries, all keyed by component name (and components_byId by
// full component ID); concurrent maps because routing reads them from
// multiple packet-processing threads.
private Map<String, XMPPService> xmppServices =
    new ConcurrentSkipListMap<String, XMPPService>();
private Map<String, ServerComponent> components =
    new ConcurrentSkipListMap<String, ServerComponent>();
private Map<String, ServerComponent> components_byId =
    new ConcurrentSkipListMap<String, ServerComponent>();
private Map<String, ComponentRegistrator> registrators =
    new ConcurrentSkipListMap<String, ComponentRegistrator>();
private Map<String, MessageReceiver> receivers =
    new ConcurrentSkipListMap<String, MessageReceiver>();
/**
 * Handles packets addressed to the MessageRouter component itself.
 * Non-ADMIN packets are rejected with a NOT_AUTHORIZED error; the only
 * recognized command is the "controll/stop" server shutdown request.
 * @param packet the inbound command packet
 * @param results queue onto which response packets are offered
 */
public void processPacketMR(final Packet packet, final Queue<Packet> results) {
    if (packet.getPermissions() != Permissions.ADMIN) {
        try {
            Packet res = Authorization.NOT_AUTHORIZED.getResponseMessage(packet,
                "You are not authorized for this action.", true);
            results.offer(res);
            //processPacket(res);
        } catch (PacketErrorTypeException e) {
            log.warning("Packet processing exception: " + e);
        }
        return;
    }
    log.finest("Command received: " + packet.getStringData());
    switch (packet.getCommand()) {
    case OTHER:
        if (packet.getStrCommand() != null) {
            // NOTE(review): "controll/" (double l) is the on-the-wire command
            // prefix clients actually send — do not "fix" the spelling.
            if (packet.getStrCommand().startsWith("controll/")) {
                String[] spl = packet.getStrCommand().split("/");
                String cmd = spl[1];
                if (cmd.equals("stop")) {
                    // Queue the result first, then exit the JVM two seconds
                    // later so the response has a chance to be delivered.
                    Packet result = packet.commandResult("result");
                    results.offer(result);
                    //processPacket(result);
                    new Timer("Stopping...", true).schedule(new TimerTask() {
                        public void run() {
                            System.exit(0);
                        }
                    }, 2000);
                }
            }
        }
        break;
    default:
        break;
    }
}
/**
 * Scales up the inherited queue limit: the router funnels traffic for every
 * component, so it gets ten times the default queue size.
 * @param def the default maximum queue size suggested by the base class
 * @return the enlarged maximum queue size for this component
 */
@Override
protected Integer getMaxQueueSize(int def) {
    final int routerQueueFactor = 10;
    return def * routerQueueFactor;
}
/**
 * Collects every message receiver whose regex routings match the given ID.
 * Insertion order is preserved and duplicates are eliminated.
 * @param id the destination ID to match against regex routings
 * @return the matching components, or null when nothing matches
 *         (callers treat null as "no regex route")
 */
private ServerComponent[] getServerComponentsForRegex(String id) {
    final LinkedHashSet<ServerComponent> matches =
        new LinkedHashSet<ServerComponent>();
    for (MessageReceiver receiver : receivers.values()) {
        if (!receiver.isInRegexRoutings(id)) {
            continue;
        }
        log.finest("Found receiver: " + receiver.getName());
        matches.add(receiver);
    }
    if (matches.isEmpty()) {
        return null;
    }
    return matches.toArray(new ServerComponent[matches.size()]);
}
/**
 * Resolves a JID to a component running inside this server, or null when the
 * address does not belong to any local component. Resolution order:
 *   1. exact match on the full component ID;
 *   2. the node part ("nick") as a component name, provided the host is a
 *      local domain or this server's default host name;
 *   3. the first DNS label of the host as a component name (e.g.
 *      "muc.example.com" -> "muc"), provided the remaining domain is local.
 * @param jid the destination JID to resolve
 * @return the matching local component, or null if none
 */
private ServerComponent getLocalComponent(String jid) {
    ServerComponent comp = components_byId.get(jid);
    if (comp != null) {
        return comp;
    }
    String host = JIDUtils.getNodeHost(jid);
    String nick = JIDUtils.getNodeNick(jid);
    if (nick != null) {
        comp = components.get(nick);
        if (comp != null &&
            (isLocalDomain(host) || host.equals(getDefHostName()))) {
            return comp;
        }
    }
    int idx = host.indexOf('.');
    if (idx > 0) {
        String cmpName = host.substring(0, idx);
        String basename = host.substring(idx + 1);
        // comp may still hold the domain-mismatched nick lookup from above;
        // the host-prefix lookup only runs when that produced nothing.
        if (comp == null) {
            comp = components.get(cmpName);
        }
        if (comp != null &&
            (isLocalDomain(basename) || basename.equals(getDefHostName()))) {
            return comp;
        }
    }
    return null;
}
// private String isToLocalComponent(String jid) {
// String nick = JIDUtils.getNodeNick(jid);
// if (nick == null) {
// return null;
// String host = JIDUtils.getNodeHost(jid);
// if (isLocalDomain(host) && components.get(nick) != null) {
// return nick;
// return null;
// private boolean isLocalDomain(String domain) {
// return localAddresses.contains(domain);
public void processPacket(Packet packet) {
if (packet.getTo() == null) {
log.warning("Packet with TO attribute set to NULL: "
+ packet.getStringData());
return;
} // end of if (packet.getTo() == null)
// Intentionally comparing to static, final String
if (packet.getTo() == NULL_ROUTING) {
log.info("NULL routing, it is normal if server doesn't know how to"
+ " process packet: " + packet.toString());
try {
Packet error =
Authorization.FEATURE_NOT_IMPLEMENTED.getResponseMessage(packet,
"Feature not supported yet.", true);
addOutPacketNB(error);
} catch (PacketErrorTypeException e) {
log.warning("Packet processing exception: " + e);
}
return;
}
// if (log.isLoggable(Level.FINER)) {
// log.finer("Processing packet: " + packet.getElemName()
// + ", type: " + packet.getType());
if (log.isLoggable(Level.FINEST)) {
log.finest("Processing packet: " + packet.toString());
}
// Detect inifinite loop if from == to
// Maybe it is not needed anymore...
// There is a need to process packets with the same from and to address
// let't try to relax restriction and block all packets with error type
// 2008-06-16
if ((packet.getType() == StanzaType.error &&
packet.getFrom() != null &&
packet.getFrom().equals(packet.getTo())) ||
(packet.getFrom() == NULL_ROUTING &&
packet.getElemFrom() != null &&
packet.getElemFrom().equals(packet.getTo()))) {
log.warning("Possible infinite loop, dropping packet: "
+ packet.toString());
return;
}
ServerComponent comp = packet.getElemTo() == null ? null
: getLocalComponent(packet.getElemTo());
if (packet.isServiceDisco() && packet.getType() != null &&
packet.getType() == StanzaType.get &&
((comp != null && !(comp instanceof DisableDisco)) ||
isLocalDomain(packet.getElemTo()))) {
log.finest("Processing disco query by: " + getComponentId());
Queue<Packet> results = new LinkedList<Packet>();
processDiscoQuery(packet, results);
if (results.size() > 0) {
for (Packet res: results) {
// No more recurrential calls!!
addOutPacketNB(res);
} // end of for ()
}
return;
}
String id = JIDUtils.getNodeID(packet.getTo());
comp = getLocalComponent(id);
if (comp != null) {
log.finest("Packet is processing by: " + comp.getComponentId());
Queue<Packet> results = new LinkedList<Packet>();
if (comp == this) {
processPacketMR(packet, results);
} else {
comp.processPacket(packet, results);
}
if (results.size() > 0) {
for (Packet res: results) {
// No more recurrential calls!!
addOutPacketNB(res);
// processPacket(res);
} // end of for ()
}
return;
}
// Let's try to find message receiver quick way
// In case if packet is handled internally:
// String nick = JIDUtils.getNodeNick(packet.getTo());
String host = JIDUtils.getNodeHost(packet.getTo());
// MessageReceiver first = null;
// Below code probably never get's executed anyway.
// All components included in commented code below should
// be picked up by code above.
// if (nick != null) {
// first = receivers.get(nick);
// } // end of if (nick != null)
// if (first != null && host.equals(getDefHostName())) {
// log.finest("Found receiver: " + first.getName());
// first.addPacketNB(packet);
// return;
// } // end of if (mr != null)
// This packet is not processed localy, so let's find receiver
// which will send it to correct destination:
ServerComponent[] comps = getComponentsForLocalDomain(host);
if (comps == null) {
comps = getServerComponentsForRegex(id);
}
if (comps == null && !isLocalDomain(host)) {
comps = getComponentsForNonLocalDomain(host);
}
if (comps != null) {
Queue<Packet> results = new LinkedList<Packet>();
for (ServerComponent serverComponent : comps) {
log.finest("Packet processed by: " +
serverComponent.getComponentId());
serverComponent.processPacket(packet, results);
if (results.size() > 0) {
for (Packet res : results) {
// No more recurrential calls!!
addOutPacketNB(res);
// processPacket(res);
} // end of for ()
}
}
} else {
log.finest("There is no component for the packet, sending it back");
try {
addOutPacketNB(
Authorization.SERVICE_UNAVAILABLE.getResponseMessage(packet,
"There is no service found to process your request.", true));
} catch (PacketErrorTypeException e) {
// This packet is to local domain, we don't want to send it out
// drop packet :-(
log.warning("Can't process packet to local domain, dropping..."
+ packet.toString());
}
}
// MessageReceiver s2s = null;
// for (MessageReceiver mr: receivers.values()) {
// Set<String> routings = mr.getRoutings();
// if (routings != null) {
// log.finest(mr.getName() + ": Looking for host: " + host +
// " in " + routings.toString());
// if (routings.contains(host) || routings.contains(id)) {
// log.finest("Found receiver: " + mr.getName());
// mr.addPacketNB(packet);
// return;
// } // end of if (routings.contains())
// // Resolve wildchars routings....
// if (mr.isInRegexRoutings(id)) {
// log.finest("Found receiver: " + mr.getName());
// mr.addPacketNB(packet);
// return;
// if (routings.contains("*")) {
// // I found s2s receiver, remember it for later....
// s2s = mr;
// } // end of if (routings.contains())
// } // end of if (routings != null)
// else {
// log.severe("Routings are null for: " + mr.getName());
// } // end of if (routings != null) else
// } // end of for (MessageReceiver mr: receivers.values())
// // It is not for any local host, so maybe it is for some
// // remote server, let's try sending it through s2s service:
// if (localAddresses.contains(host) || comp != null) {
// try {
// addOutPacketNB(
// Authorization.FEATURE_NOT_IMPLEMENTED.getResponseMessage(packet,
// "Your request can not be processed.", true));
// } catch (PacketErrorTypeException e) {
// // This packet is to local domain, we don't want to send it out
// // drop packet :-(
// log.warning("Can't process packet to local domain, dropping..."
// + packet.toString());
// return;
// if (s2s != null) {
// s2s.addPacketNB(packet);
// } // end of if (s2s != null)
}
/**
 * Resolves the server components responsible for a domain hosted on this
 * installation. Thin delegation to the virtual-host manager; the result may
 * be null when no component serves the domain locally.
 */
private ServerComponent[] getComponentsForLocalDomain(String localDomain) {
	return vHostManager.getComponentsForLocalDomain(localDomain);
}
/**
 * Resolves the server components able to deliver packets addressed to a
 * domain NOT hosted locally (typically the s2s service). Thin delegation to
 * the virtual-host manager; may return null.
 */
private ServerComponent[] getComponentsForNonLocalDomain(String remoteDomain) {
	return vHostManager.getComponentsForNonLocalDomain(remoteDomain);
}
/**
 * Installs the configuration registrator for this router.
 * The router registers ITSELF in the components map first, so that the
 * addRegistrator(...) call below (which walks components.values()) will also
 * announce this router to the new registrator. Do not reorder these calls.
 */
public void setConfig(ComponentRegistrator config) {
components.put(getName(), this);
this.config = config;
addRegistrator(config);
}
/**
 * Adds a component registrator. A registrator is itself a server component,
 * so it is first registered through addComponent(...), and afterwards it is
 * told about every component known so far — deliberately including itself,
 * matching the original behavior.
 */
public void addRegistrator(ComponentRegistrator registr) {
	log.info("Adding registrator: " + registr.getClass().getSimpleName());
	registrators.put(registr.getName(), registr);
	addComponent(registr);
	// Replay all previously registered components to the new registrator.
	for (ServerComponent existing : components.values()) {
		registr.addComponent(existing);
	}
}
/**
 * Adds a message receiver to the routing layer: first announce it to all
 * registrators via addComponent(...), then remember it by name so packets
 * can be dispatched to it.
 */
public void addRouter(MessageReceiver receiver) {
	log.info("Adding receiver: " + receiver.getClass().getSimpleName());
	addComponent(receiver);
	receivers.put(receiver.getName(), receiver);
}
/**
 * Registers a server component: announces it to every registrator (except
 * itself, when the component happens to be a registrator), then indexes it
 * by name and by component id. Components implementing XMPPService are
 * additionally tracked for service-discovery aggregation.
 */
public void addComponent(ServerComponent component) {
	log.info("Adding component: " + component.getClass().getSimpleName());
	for (ComponentRegistrator registrator : registrators.values()) {
		// A registrator must not be announced to itself.
		if (registrator == component) {
			continue;
		}
		log.finer("Adding: " + component.getName() + " component to "
			+ registrator.getName() + " registrator.");
		registrator.addComponent(component);
	}
	components.put(component.getName(), component);
	components_byId.put(component.getComponentId(), component);
	if (component instanceof XMPPService) {
		xmppServices.put(component.getName(), (XMPPService) component);
	}
}
/**
 * Returns the default configuration: the parent defaults overlaid with the
 * message-router-specific settings contributed by MessageRouterConfig.
 */
@Override
public Map<String, Object> getDefaults(Map<String, Object> params) {
	Map<String, Object> defaults = super.getDefaults(params);
	MessageRouterConfig.getDefaults(defaults, params, getName());
	return defaults;
}
// Re-entrancy guard: setProperties() indirectly triggers configuration
// callbacks that may call setProperties() again; the flag makes the nested
// call a no-op. NOTE(review): presumably only ever touched from one
// configuration thread - confirm, the flag itself is not synchronized.
private boolean inProperties = false;
/**
 * Applies a new configuration to the router. Rebuilds the component,
 * registrator and receiver maps from scratch: instances whose class matches
 * the new configuration are reused, all others are released and replaced.
 * Finally (re)installs or stops the updates checker and notifies every
 * component that initialization completed.
 */
@Override
public void setProperties(Map<String, Object> props) {
// Bail out if we are already inside a setProperties() call.
if (inProperties) {
return;
} else {
inProperties = true;
} // end of if (inProperties) else
// Service-discovery identity shown to disco#info queries.
disco_name = (String)props.get(DISCO_NAME_PROP_KEY);
disco_show_version = (Boolean)props.get(DISCO_SHOW_VERSION_PROP_KEY);
serviceEntity = new ServiceEntity("Tigase", "server", "Session manager");
serviceEntity.addIdentities(new ServiceIdentity[] {
new ServiceIdentity("server", "im", disco_name +
(disco_show_version ?
(" ver. " + tigase.server.XMPPServer.getImplementationVersion())
: ""))});
serviceEntity.addFeatures(XMPPService.DEF_FEATURES);
try {
super.setProperties(props);
// String[] localAddresses = (String[])props.get(LOCAL_ADDRESSES_PROP_KEY);
// this.localAddresses.clear();
// if (localAddresses != null && localAddresses.length > 0) {
// Collections.addAll(this.localAddresses, localAddresses);
// this.localAddresses.add(getDefHostName());
// Keep the old maps aside: entries still valid under the new config are
// moved back, everything left over is released at the end.
Map<String, ComponentRegistrator> tmp_reg = registrators;
Map<String, MessageReceiver> tmp_rec = receivers;
components = new TreeMap<String, ServerComponent>();
registrators = new TreeMap<String, ComponentRegistrator>();
receivers = new TreeMap<String, MessageReceiver>();
// Re-register this router and the config registrator into the fresh maps.
setConfig(config);
MessageRouterConfig conf = new MessageRouterConfig(props);
String[] reg_names = conf.getRegistrNames();
for (String name: reg_names) {
// Reuse the old instance only if its class matches the configured one.
ComponentRegistrator cr = tmp_reg.remove(name);
String cls_name =
(String)props.get(REGISTRATOR_PROP_KEY + name + ".class");
try {
if (cr == null || !cr.getClass().getName().equals(cls_name)) {
if (cr != null) {
cr.release();
}
cr = conf.getRegistrInstance(name);
cr.setName(name);
} // end of if (cr == null)
addRegistrator(cr);
} // end of try
catch (Exception e) {
e.printStackTrace();
} // end of try-catch
} // end of for (String name: reg_names)
// Anything left in the old map is no longer configured - release it.
for (ComponentRegistrator cr: tmp_reg.values()) {
cr.release();
} // end of for ()
tmp_reg.clear();
String[] msgrcv_names = conf.getMsgRcvNames();
for (String name: msgrcv_names) {
log.finer("Loading and registering message receiver: " + name);
// Same reuse-or-replace policy as for registrators above.
MessageReceiver mr = tmp_rec.remove(name);
String cls_name =
(String)props.get(MSG_RECEIVERS_PROP_KEY + name + ".class");
try {
if (mr == null || !mr.getClass().getName().equals(cls_name)) {
if (mr != null) {
mr.release();
}
mr = conf.getMsgRcvInstance(name);
mr.setParent(this);
mr.setName(name);
mr.start();
} // end of if (cr == null)
addRouter(mr);
} // end of try
catch (Exception e) {
e.printStackTrace();
} // end of try-catch
} // end of for (String name: reg_names)
for (MessageReceiver mr: tmp_rec.values()) {
mr.release();
} // end of for ()
tmp_rec.clear();
// Updates checker is toggled according to the new configuration.
if ((Boolean)props.get(UPDATES_CHECKING_PROP_KEY)) {
installUpdatesChecker((Long)props.get(UPDATES_CHECKING_INTERVAL_PROP_KEY));
} else {
stopUpdatesChecker();
}
} finally {
// Always clear the guard, even when configuration failed half-way.
inProperties = false;
} // end of try-finally
// Notify every (new or reused) component that configuration is finished.
for (ServerComponent comp : components.values()) {
comp.initializationCompleted();
}
}
/**
 * Stops the background updates-checker thread, if one is running.
 * Idempotent: calling it with no active checker is a no-op.
 */
private void stopUpdatesChecker() {
	if (updates_checker == null) {
		return;
	}
	updates_checker.interrupt();
	updates_checker = null;
}
/**
 * (Re)starts the background updates checker with the given interval
 * (in days, per the notification text). Any previously running checker
 * is stopped first so at most one checker thread exists.
 */
private void installUpdatesChecker(long interval) {
	stopUpdatesChecker();
	String notification =
		"This is automated message generated by updates checking module.\n"
		+ " You can disable this function changing configuration option: "
		+ "'/" + getName() + "/" + UPDATES_CHECKING_PROP_KEY + "' or adjust"
		+ " updates checking interval time changing option: "
		+ "'/" + getName() + "/" + UPDATES_CHECKING_INTERVAL_PROP_KEY + "' which"
		+ " now set to " + interval + " days.";
	updates_checker = new UpdatesChecker(interval, this, notification);
	updates_checker.start();
}
/**
 * Builds the reply to a service-discovery (XEP-0030) IQ query addressed to
 * this server — either disco#info or disco#items, selected by the query
 * element's XML namespace — and queues the ok-result in {@code results}.
 * For a local domain the answers of all registered XMPPService components
 * are aggregated; otherwise the first/matching component answers alone.
 */
private void processDiscoQuery(final Packet packet,
final Queue<Packet> results) {
String jid = packet.getElemTo();
// NOTE(review): 'nick' is computed but never used below - kept as-is.
String nick = JIDUtils.getNodeNick(jid);
// Optional disco "node" attribute narrowing the query.
String node = packet.getAttribute("/iq/query", "node");
// Default result payload: echo of the incoming query element.
// NOTE(review): getChild("query") is not null-checked; a malformed IQ
// without a <query/> child would throw NullPointerException here - confirm
// that upstream routing guarantees the element exists.
Element query = packet.getElement().getChild("query").clone();
if (packet.isXMLNS("/iq/query", INFO_XMLNS)) {
if (isLocalDomain(jid)) {
// Local domain: start from this router's own disco#info and merge in
// the features advertised by every XMPPService component.
query = getDiscoInfo(node, jid);
for (XMPPService comp: xmppServices.values()) {
List<Element> features = comp.getDiscoFeatures();
if (features != null) {
query.addChildren(features);
}
} // end of for ()
} else {
// Non-local JID: the first component that answers wins.
for (XMPPService comp: xmppServices.values()) {
// if (jid.startsWith(comp.getName() + ".")) {
Element resp = comp.getDiscoInfo(node, jid);
if (resp != null) {
query = resp;
break;
}
} // end of for ()
}
}
if (packet.isXMLNS("/iq/query", ITEMS_XMLNS)) {
boolean localDomain = isLocalDomain(jid);
if (localDomain) {
// Aggregate disco#items from every XMPPService component.
for (XMPPService comp: xmppServices.values()) {
// if (localDomain || (nick != null && comp.getName().equals(nick))) {
List<Element> items = comp.getDiscoItems(node, jid);
log.finest("DiscoItems processed by: " + comp.getComponentId()
+ ", items: " + (items == null ? null : items.toString()));
if (items != null && items.size() > 0) {
query.addChildren(items);
}
} // end of for ()
} else {
// Non-local domain: ask only the component owning the destination JID.
ServerComponent comp = getLocalComponent(packet.getElemTo());
if (comp != null && comp instanceof XMPPService) {
List<Element> items = ((XMPPService)comp).getDiscoItems(node, jid);
log.finest("DiscoItems processed by: " + comp.getComponentId()
+ ", items: " + (items == null ? null : items.toString()));
if (items != null && items.size() > 0) {
query.addChildren(items);
}
}
}
}
// Wrap the collected payload in an ok-result and hand it back.
results.offer(packet.okResult(query, 0));
}
/**
 * Returns the disco#info payload for this component. The {@code node} and
 * {@code jid} parameters are accepted for interface compatibility but are
 * not consulted — the same top-level service entity description is returned
 * for every query.
 */
public Element getDiscoInfo(String node, String jid) {
	Element query = serviceEntity.getDiscoInfo(null);
	// Fixed log message typo ("Returing" -> "Returning").
	log.finest("Returning disco-info: " + query.toString());
	return query;
}
/**
 * Returns the disco#items payload for this component.
 * NOTE(review): deliberately returns null rather than an empty list —
 * callers in this file (see processDiscoQuery) explicitly null-check the
 * result and log it, so switching to an empty collection would change their
 * observable behavior. Confirm before "fixing".
 */
public List<Element> getDiscoItems(String node, String jid) {
return null;
}
/**
 * Collects runtime statistics for this router on top of the parent's list:
 * JVM uptime, OS load average, CPU count, thread count, approximate CPU
 * usage and heap/non-heap memory figures (reported in KB).
 */
@Override
public List<StatRecord> getStatistics() {
	List<StatRecord> stats = super.getStatistics();
	// JVM uptime is reported in milliseconds; split it into d/h/m/s.
	long uptime = ManagementFactory.getRuntimeMXBean().getUptime();
	long days = uptime / (24 * HOUR);
	long hours = (uptime - (days * 24 * HOUR)) / HOUR;
	long minutes = (uptime - (days * 24 * HOUR + hours * HOUR)) / MINUTE;
	long seconds =
		(uptime - (days * 24 * HOUR + hours * HOUR + minutes * MINUTE)) / SECOND;
	stats.add(new StatRecord(getName(), "Uptime", "time", ""
		+ (days > 0 ? days + " day, " : "")
		+ (hours > 0 ? hours + " hour, " : "")
		+ (minutes > 0 ? minutes + " min, " : "")
		+ (seconds > 0 ? seconds + " sec" : "")
		, Level.INFO));
	OperatingSystemMXBean osBean = ManagementFactory.getOperatingSystemMXBean();
	NumberFormat format = NumberFormat.getNumberInstance();
	format.setMaximumFractionDigits(4);
	stats.add(new StatRecord(getName(), "Load average", "double",
		format.format(osBean.getSystemLoadAverage()), Level.INFO));
	stats.add(new StatRecord(getName(), "CPUs no", "int",
		osBean.getAvailableProcessors(), Level.FINEST));
	ThreadMXBean thBean = ManagementFactory.getThreadMXBean();
	stats.add(new StatRecord(getName(), "Threads count", "int",
		thBean.getThreadCount(), Level.FINEST));
	long cpuTime = 0;
	for (long thid : thBean.getAllThreadIds()) {
		// NOTE(review): getThreadCpuTime() may return -1 when CPU time
		// measurement is unsupported/disabled; the original summed it
		// unconditionally too - confirm whether filtering is wanted.
		cpuTime += thBean.getThreadCpuTime(thid);
	}
	// cpuTime is nanoseconds, uptime is milliseconds: convert ns -> ms before
	// dividing. Primitive casts replace the deprecated new Long(...) boxing,
	// and a guard avoids division by zero very early in the JVM lifetime.
	double cpuUsage = (uptime > 0)
		? ((double) cpuTime / 1000000d) / (double) uptime
		: 0d;
	format = NumberFormat.getPercentInstance();
	format.setMaximumFractionDigits(2);
	stats.add(new StatRecord(getName(), "CPU usage", "double",
		format.format(cpuUsage), Level.INFO));
	MemoryUsage heap = ManagementFactory.getMemoryMXBean().getHeapMemoryUsage();
	MemoryUsage nonHeap =
		ManagementFactory.getMemoryMXBean().getNonHeapMemoryUsage();
	// Append " KB" to the integer format so memory figures are self-describing.
	format = NumberFormat.getIntegerInstance();
	if (format instanceof DecimalFormat) {
		DecimalFormat decf = (DecimalFormat)format;
		decf.applyPattern(decf.toPattern()+" KB");
	}
	stats.add(new StatRecord(getName(), "Max Heap mem", "long",
		format.format(heap.getMax()/1024), Level.INFO));
	stats.add(new StatRecord(getName(), "Used Heap", "long",
		format.format(heap.getUsed()/1024), Level.INFO));
	stats.add(new StatRecord(getName(), "Free Heap", "long",
		format.format((heap.getMax() - heap.getUsed())/1024), Level.INFO));
	stats.add(new StatRecord(getName(), "Max NonHeap mem", "long",
		format.format(nonHeap.getMax()/1024), Level.INFO));
	stats.add(new StatRecord(getName(), "Used NonHeap", "long",
		format.format(nonHeap.getUsed()/1024), Level.INFO));
	stats.add(new StatRecord(getName(), "Free NonHeap", "long",
		format.format((nonHeap.getMax() - nonHeap.getUsed())/1024), Level.INFO));
	return stats;
}
}
|
package webbit.netty;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.http.HttpChunkAggregator;
import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
import org.jboss.netty.handler.codec.http.HttpResponseEncoder;
import webbit.HttpHandler;
import webbit.WebServer;
import webbit.WebSocketHandler;
import webbit.handler.HttpToWebSocketHandler;
import webbit.handler.PathMatchHandler;
import java.io.IOException;
import java.net.*;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import static org.jboss.netty.channel.Channels.pipeline;
/**
 * Netty 3 based implementation of {@link WebServer}. HTTP requests are
 * decoded, aggregated (up to 64 KiB) and dispatched to the registered
 * {@link HttpHandler}s on the supplied {@link Executor}.
 */
public class NettyWebServer implements WebServer {
    private final ServerBootstrap bootstrap;
    private final SocketAddress socketAddress;
    private final URI publicUri;
    private final List<HttpHandler> handlers = new ArrayList<HttpHandler>();
    private final Executor executor;
    private Channel channel;
    // BUGFIX: the factory created in start() was previously never released,
    // leaking its boss/worker threads on every start()/stop() cycle. Keep a
    // reference so stop() can shut it down.
    private NioServerSocketChannelFactory channelFactory;

    public NettyWebServer(int port) {
        this(Executors.newSingleThreadScheduledExecutor(), port);
    }

    public NettyWebServer(final Executor executor, int port) {
        this(executor, new InetSocketAddress(port), localUri(port));
    }

    /**
     * @param executor      executor on which handler callbacks are run
     * @param socketAddress address to bind when {@link #start()} is called
     * @param publicUri     externally visible base URI of this server
     */
    public NettyWebServer(final Executor executor, SocketAddress socketAddress, URI publicUri) {
        this.executor = executor;
        this.socketAddress = socketAddress;
        this.publicUri = publicUri;

        // Configure the server.
        bootstrap = new ServerBootstrap();

        // Set up the event pipeline factory.
        bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
            @Override
            public ChannelPipeline getPipeline() throws Exception {
                ChannelPipeline pipeline = pipeline();
                pipeline.addLast("decoder", new HttpRequestDecoder());
                // Aggregate chunked requests into single messages, 64 KiB max.
                pipeline.addLast("aggregator", new HttpChunkAggregator(65536));
                pipeline.addLast("encoder", new HttpResponseEncoder());
                pipeline.addLast("handler", new NettyHttpChannelHandler(executor, handlers));
                return pipeline;
            }
        });
    }

    @Override
    public URI getUri() {
        return publicUri;
    }

    @Override
    public Executor getExecutor() {
        return executor;
    }

    @Override
    public NettyWebServer add(HttpHandler handler) {
        handlers.add(handler);
        return this;
    }

    @Override
    public NettyWebServer add(String path, HttpHandler handler) {
        return add(new PathMatchHandler(path, handler));
    }

    @Override
    public NettyWebServer add(String path, WebSocketHandler handler) {
        return add(path, new HttpToWebSocketHandler(handler));
    }

    @Override
    public synchronized NettyWebServer start() {
        channelFactory = new NioServerSocketChannelFactory(
                Executors.newSingleThreadExecutor(),
                Executors.newSingleThreadExecutor(), 1);
        bootstrap.setFactory(channelFactory);
        channel = bootstrap.bind(socketAddress);
        return this;
    }

    @Override
    public synchronized NettyWebServer stop() throws IOException {
        if (channel != null) {
            // Wait for the channel to actually close before tearing down the
            // I/O threads below; also null the field so start() can be called
            // again cleanly.
            channel.close().awaitUninterruptibly();
            channel = null;
        }
        if (channelFactory != null) {
            // Releases boss/worker executors created in start() (thread leak fix).
            channelFactory.releaseExternalResources();
            channelFactory = null;
        }
        return this;
    }

    /** Blocks until the server channel closes (or returns at once if stopped). */
    public synchronized NettyWebServer join() throws InterruptedException {
        if (channel != null) {
            channel.getCloseFuture().await();
        }
        return this;
    }

    private static URI localUri(int port) {
        try {
            return URI.create("http://" + InetAddress.getLocalHost().getHostName() + (port == 80 ? "" : (":" + port)) + "/");
        } catch (UnknownHostException e) {
            // NOTE(review): returning null silently yields a server with no
            // public URI; consider a "localhost" fallback - confirm callers.
            return null;
        }
    }
}
|
package verification.platu.stategraph;
import java.io.*;
import java.util.*;
import lpn.parser.LhpnFile;
import lpn.parser.Transition;
import verification.platu.common.PlatuObj;
import verification.platu.lpn.DualHashMap;
import verification.platu.lpn.LPN;
import verification.platu.lpn.LpnTranList;
import verification.platu.lpn.VarSet;
import verification.timed_state_exploration.zone.TimedState;
/**
 * State
 *
 * An untimed LPN state: the current marking, the vector of variable values
 * and an indicator vector recording which transitions are enabled, tied to
 * the {@link LhpnFile} it belongs to. Instances behave as value objects —
 * equals()/hashCode() are defined over (lpn, marking, vector, tranVector)
 * and the hash is cached after the first computation.
 *
 * @author Administrator
 */
public class State extends PlatuObj {
	/** Instrumentation counters: [0]=constructed, [2]=update, [4]=equals,
	 *  [6]=clone, [7]=hashCode, [10]=unused here (see printUsageStats()). */
	public static int[] counts = new int[15];
	protected int[] marking;
	protected int[] vector;
	protected boolean[] tranVector; // indicator vector to record whether each transition is enabled or not.
	private int hashVal = 0; // cached hashCode; 0 means "not yet computed"
	private LhpnFile lpn = null;
	private int index;
	private boolean localEnabledOnly;
	protected boolean failure = false;
	// The TimedState that extends this state with a zone. Null if untimed.
	protected TimedState timeExtension;

	@Override
	public String toString() {
		return this.print();
	}

	/**
	 * Builds a state from its raw components. The arrays are stored by
	 * reference (not copied); callers must not mutate them afterwards.
	 * Preserves the original's lenient null handling: a null component is
	 * reported via a stack trace but construction proceeds.
	 */
	public State(final LhpnFile lpn, int[] new_marking, int[] new_vector, boolean[] new_isTranEnabled) {
		this.lpn = lpn;
		this.marking = new_marking;
		this.vector = new_vector;
		this.tranVector = new_isTranEnabled;
		if (marking == null || vector == null || tranVector == null) {
			new NullPointerException().printStackTrace();
		}
		this.index = 0;
		localEnabledOnly = false;
		counts[0]++;
	}

	/** Deep-copy constructor: all three arrays are copied, the cached hash is reset. */
	public State(State other) {
		if (other == null) {
			new NullPointerException().printStackTrace();
		}
		this.lpn = other.lpn;
		this.marking = new int[other.marking.length];
		System.arraycopy(other.marking, 0, this.marking, 0, other.marking.length);
		this.vector = new int[other.vector.length];
		System.arraycopy(other.vector, 0, this.vector, 0, other.vector.length);
		this.tranVector = new boolean[other.tranVector.length];
		System.arraycopy(other.tranVector, 0, this.tranVector, 0, other.tranVector.length);
		// Recompute lazily rather than trusting the source's cached value.
		this.hashVal = 0;
		this.index = other.index;
		this.localEnabledOnly = other.localEnabledOnly;
		counts[0]++;
	}

	public void setLpn(final LhpnFile thisLpn) {
		this.lpn = thisLpn;
	}

	public LhpnFile getLpn() {
		return this.lpn;
	}

	/** Labels are not supported by this implementation; setter is a no-op. */
	public void setLabel(String lbl) {
	}

	/** Labels are not supported by this implementation; always returns null. */
	public String getLabel() {
		return null;
	}

	/**
	 * This method returns the boolean array representing the status
	 * (enabled/disabled) of each transition in an LPN.
	 * @return the enabled-transition indicator vector (live reference)
	 */
	public boolean[] getTranVector() {
		return tranVector;
	}

	public void setIndex(int newIndex) {
		this.index = newIndex;
	}

	public int getIndex() {
		return this.index;
	}

	public boolean hasNonLocalEnabled() {
		return this.localEnabledOnly;
	}

	public void hasNonLocalEnabled(boolean nonLocalEnabled) {
		this.localEnabledOnly = nonLocalEnabled;
	}

	/**
	 * NOTE(review): always returns false and ignores the 'failure' field;
	 * callers interested in the flag use failure() instead. Preserved as-is —
	 * confirm before unifying the two.
	 */
	public boolean isFailure() {
		return false;
	}

	public static long tSum = 0;

	@Override
	public State clone() {
		counts[6]++;
		return new State(this);
	}

	/** Renders marking, variable vector and transition vector as a multi-line string. */
	public String print() {
		DualHashMap<String, Integer> varIndexMap = this.lpn.getVarIndexMap();
		// StringBuilder replaces repeated String += concatenation; output is identical.
		StringBuilder message = new StringBuilder("Marking: [");
		for (int token : marking) {
			message.append(token).append(",");
		}
		message.append("]\n").append("Vector: [");
		for (int i = 0; i < vector.length; i++) {
			message.append(varIndexMap.getKey(i)).append("=>").append(vector[i]).append(", ");
		}
		message.append("]\n").append("Transition Vector: [");
		for (int i = 0; i < tranVector.length; i++) {
			message.append(tranVector[i]).append(",");
		}
		message.append("]\n");
		return message.toString();
	}

	@Override
	public int hashCode() {
		// Lazily computed and cached; 0 doubles as the "unset" sentinel.
		// NOTE(review): hashCode mixes lpn.getLabel() while equals() compares
		// lpn via equals() - consistent only if LhpnFile equality implies equal
		// labels; confirm.
		if (hashVal == 0) {
			final int prime = 31;
			int result = 1;
			result = prime * result + ((lpn == null) ? 0 : lpn.getLabel().hashCode());
			result = prime * result + Arrays.hashCode(marking);
			result = prime * result + Arrays.hashCode(vector);
			result = prime * result + Arrays.hashCode(tranVector);
			hashVal = result;
		}
		return hashVal;
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj)
			return true;
		if (obj == null)
			return false;
		if (getClass() != obj.getClass())
			return false;
		State other = (State) obj;
		if (lpn == null) {
			if (other.lpn != null)
				return false;
		}
		else if (!lpn.equals(other.lpn))
			return false;
		// Element-wise comparison of all three state components.
		if (!Arrays.equals(marking, other.marking))
			return false;
		if (!Arrays.equals(vector, other.vector))
			return false;
		if (!Arrays.equals(tranVector, other.tranVector))
			return false;
		return true;
	}

	/** Prints the state to stdout, resolving variable names via the given map. */
	public void print(DualHashMap<String, Integer> VarIndexMap) {
		System.out.print("Marking: [");
		for (int i : marking) {
			System.out.print(i + ",");
		}
		System.out.println("]");
		System.out.print("Vector: [");
		for (int i = 0; i < vector.length; i++) {
			System.out.print(VarIndexMap.getKey(i) + "=>" + vector[i] + ", ");
		}
		System.out.println("]");
		System.out.print("Transition vector: [");
		for (boolean bool : tranVector) {
			System.out.print(bool + ",");
		}
		System.out.println("]");
	}

	/**
	 * @return the marking (live reference, not a copy)
	 */
	public int[] getMarking() {
		return marking;
	}

	public void setMarking(int[] newMarking) {
		marking = newMarking;
	}

	/**
	 * @return the vector (live reference, not a copy)
	 */
	public int[] getVector() {
		return vector;
	}

	/** Projects the vector onto the given output variables, keyed by name. */
	public HashMap<String, Integer> getOutVector(VarSet outputs, DualHashMap<String, Integer> VarIndexMap) {
		HashMap<String, Integer> outVec = new HashMap<String, Integer>();
		for (int i = 0; i < vector.length; i++) {
			String var = VarIndexMap.getKey(i);
			if (outputs.contains(var))
				outVec.put(var, vector[i]);
		}
		return outVec;
	}

	/**
	 * Builds the "local view" of this state: input variables are zeroed,
	 * outputs and internal variables keep their values. Marking and
	 * tranVector are shared with this state (not copied).
	 */
	public State getLocalState() {
		Set<String> lpnOutputs = this.lpn.getAllOutputs().keySet();
		Set<String> lpnInternals = this.lpn.getAllInternals().keySet();
		DualHashMap<String, Integer> varIndexMap = this.lpn.getVarIndexMap();
		int[] outVec = new int[this.vector.length];
		/*
		 * Create a copy of the vector of mState such that the values of inputs are set to 0
		 * and the values for outputs/internal variables remain the same.
		 */
		for (int i = 0; i < this.vector.length; i++) {
			String curVar = varIndexMap.getKey(i);
			if (lpnOutputs.contains(curVar) || lpnInternals.contains(curVar))
				outVec[i] = this.vector[i];
			else
				outVec[i] = 0;
		}
		// TODO: (??) Need to create outTranVector as well?
		return new State(this.lpn, this.marking, outVec, this.tranVector);
	}

	/**
	 * @return the enabledSet
	 * NOTE(review): always null - the enabledSet representation was retired
	 * in favor of tranVector; see getEnabledTransitions().
	 */
	public int[] getEnabledSet() {
		return null;
	}

	/** Materializes the enabled transitions indicated by tranVector. */
	public LpnTranList getEnabledTransitions() {
		LpnTranList enabledTrans = new LpnTranList();
		for (int i = 0; i < tranVector.length; i++) {
			if (tranVector[i]) {
				enabledTrans.add(this.lpn.getTransition(i));
			}
		}
		return enabledTrans;
	}

	/** NOTE(review): stubbed out along with enabledSet; always returns "". */
	public String getEnabledSetString() {
		return "";
	}

	/**
	 * Return a new state if the newVector leads to a new state from this state;
	 * otherwise return null. The transition vector of the candidate state is
	 * recomputed via the state graph.
	 * @param newVector
	 * @param VarIndexMap
	 * @return
	 */
	public State update(StateGraph SG, HashMap<String, Integer> newVector, DualHashMap<String, Integer> VarIndexMap) {
		int[] newStateVector = new int[this.vector.length];
		boolean newState = false;
		for (int index = 0; index < vector.length; index++) {
			String var = VarIndexMap.getKey(index);
			int this_val = this.vector[index];
			Integer newVal = newVector.get(var);
			// Variables absent from newVector keep their current value.
			if (newVal != null && this_val != newVal) {
				newState = true;
				newStateVector[index] = newVal;
			} else {
				newStateVector[index] = this.vector[index];
			}
		}
		boolean[] newEnabledTranVector = SG.updateEnabledTranVector(this.getTranVector(), this.marking, newStateVector, null);
		if (newState)
			return new State(this.lpn, this.marking, newStateVector, newEnabledTranVector);
		return null;
	}

	/**
	 * Return a new state if the newVector leads to a new state from this state;
	 * otherwise return null. States considered here include a vector indicating
	 * enabled/disabled state of each transition.
	 * @param newVector
	 * @param VarIndexMap
	 * @return
	 */
	public State update(HashMap<String, Integer> newVector, DualHashMap<String, Integer> VarIndexMap,
			boolean[] newTranVector) {
		int[] newStateVector = new int[this.vector.length];
		boolean newState = false;
		for (int index = 0; index < vector.length; index++) {
			String var = VarIndexMap.getKey(index);
			int this_val = this.vector[index];
			Integer newVal = newVector.get(var);
			if (newVal != null && this_val != newVal) {
				newState = true;
				newStateVector[index] = newVal;
			} else {
				newStateVector[index] = this.vector[index];
			}
		}
		// BUGFIX: the original used tranVector.equals(newTranVector), which on
		// arrays is reference identity and therefore reported a "new state" on
		// every call. Arrays.equals() compares element-wise content.
		if (!Arrays.equals(this.tranVector, newTranVector))
			newState = true;
		if (newState)
			return new State(this.lpn, this.marking, newStateVector, newTranVector);
		return null;
	}

	/** Dumps the instrumentation counters collected by this class. */
	static public void printUsageStats() {
		System.out.printf("%-20s %11s\n", "State", counts[0]);
		System.out.printf("\t%-20s %11s\n", "State", counts[10]);
		System.out.printf("\t%-20s %11s\n", "update", counts[2]);
		System.out.printf("\t%-20s %11s\n", "equals", counts[4]);
		System.out.printf("\t%-20s %11s\n", "clone", counts[6]);
		System.out.printf("\t%-20s %11s\n", "hashCode", counts[7]);
	}

	// TODO: (original) try database serialization
	/**
	 * Writes this state to the given file via Java serialization.
	 * NOTE(review): assumes the class is Serializable via PlatuObj - confirm.
	 */
	public File serialize(String filename) throws FileNotFoundException,
			IOException {
		File f = new File(filename);
		ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(f));
		try {
			os.writeObject(this);
		} finally {
			os.close(); // close even when writeObject fails (leak fix)
		}
		return f;
	}

	/** Reads a serialized state back from the named file. */
	public static State deserialize(String filename)
			throws FileNotFoundException, IOException, ClassNotFoundException {
		return deserialize(new File(filename));
	}

	/** Reads a serialized state back from the given file. */
	public static State deserialize(File f) throws FileNotFoundException,
			IOException, ClassNotFoundException {
		ObjectInputStream os = new ObjectInputStream(new FileInputStream(f));
		try {
			return (State) os.readObject();
		} finally {
			os.close(); // close even when readObject fails (leak fix)
		}
	}

	public boolean failure() {
		return this.failure;
	}

	public void setFailure() {
		this.failure = true;
	}

	/** Prints the state to stdout, resolving place and variable names via the LPN. */
	public void print(LhpnFile lpn) {
		System.out.print("Marking: [");
		// Hoisted: the original cloned the entire place array on every loop
		// iteration; a single read-only reference suffices.
		String[] places = lpn.getPlaceList();
		for (int i = 0; i < marking.length; i++) {
			System.out.print(places[i] + "=" + marking[i] + ", ");
		}
		System.out.println("]");
		System.out.print("Vector: [");
		for (int i = 0; i < vector.length; i++) {
			System.out.print(lpn.getVarIndexMap().getKey(i) + "=>" + vector[i] + ", ");
		}
		System.out.println("]");
		System.out.print("Transition vector: [");
		for (boolean bool : tranVector) {
			System.out.print(bool + ",");
		}
		System.out.println("]");
	}

	/**
	 * Getter for the TimedState that extends this state.
	 * @return
	 * 		The TimedState that extends this state if it has been set. Null, otherwise.
	 */
	public TimedState getTimeExtension() {
		return timeExtension;
	}

	/**
	 * Setter for the TimedState that extends this state.
	 * @param s
	 * 		The TimedState that extends this state.
	 */
	public void setTimeExtension(TimedState s) {
		timeExtension = s;
	}
}
|
import htsjdk.samtools.SAMRecord;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Queue;
import java.util.LinkedList;
import java.util.Collection;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import org.jgrapht.*;
import org.jgrapht.graph.*;
public class HLAGraph{
//A B C ... //HLA-DPA1, HLA-DPB1, HLA-DQA1, HLA-DQB1, HLA-DRA, and HLA-DRB1
private String HLAGeneName;
private ArrayList<Sequence> alleles;
private HashMap<String, Sequence> alleleHash;
//private SimpleDirectedWeightedGraph<Node, DefaultWeightedEdge> g;
private SimpleDirectedWeightedGraph<Node, CustomWeightedEdge> g;
//private ArrayList<StringBuffer> interBubbleSequences;
//private ArrayList<Path> interBubblePaths;
private ArrayList<TmpPath> interBubblePaths2;
//private ArrayList<HashMap<Character, Node>> nodeHashList;//list index = columnIndex-1.
private ArrayList<HashMap<Integer, Node>> nodeHashList;// list index = columnIndex - 1;
private ArrayList<HLASequence> typingSequences;
private Node sNode;
private Node tNode;
private int columnLen;
private String outputfilename;
private StringBuffer resultBuffer;
//keeps track excess lengths added to head and tail of typing regions(exon) due to bubbles in the beginning and end
private int[] headerExcessLengthBeyondTypingBoundary;
private int[] tailExcessLengthBeyondTypingBoundary;
/* Outer list index = columnIndex -1 --> insertion point */
/* Inner list index insertion length */
//private ArrayList<ArrayList<HashMap<Character, Node>>> insertionNodeHashList;
private ArrayList<ArrayList<HashMap<Integer, Node>>> insertionNodeHashList;
/**
 * Installs the typing sequences for this gene and immediately persists them
 * to the FASTA file written by writeTypingSequences().
 */
public void setTypingSequences(ArrayList<HLASequence> seqs){
    this.typingSequences = seqs;
    this.writeTypingSequences();
}
/**
 * Writes all typing sequences to "&lt;gene&gt;_typingSequences_G_group.fa".
 * I/O errors are reported via stack trace (best effort), matching the
 * original behavior.
 */
private void writeTypingSequences(){
    BufferedWriter bw = null;
    try{
        bw = new BufferedWriter(new FileWriter(HLAGeneName+"_typingSequences_G_group.fa"));
        for(HLASequence hs : this.typingSequences)
            bw.write(hs.toString());
    }catch(IOException ioe){
        ioe.printStackTrace();
    }finally{
        // BUGFIX: the original only closed the writer on the success path,
        // leaking the file handle whenever a write threw.
        if(bw != null){
            try{
                bw.close();
            }catch(IOException ioe){
                ioe.printStackTrace();
            }
        }
    }
}
/** Accessor for the underlying weighted assembly graph. */
public SimpleDirectedWeightedGraph<Node, CustomWeightedEdge> getGraph(){
    return this.g;
}
/* DO NOT use this to add sNode and tNode */
/**
 * Adds a node to the graph and indexes it in nodeHashList under its column
 * (colIndex-1) keyed by its integer base.
 */
public void addVertex(Node n){
    this.g.addVertex(n);
    // Removed the unused local for the column index; autoboxing/valueOf
    // replaces the deprecated new Integer(...) allocation.
    this.nodeHashList.get(n.getColIndex()-1).put(Integer.valueOf(n.getIBase()), n);
}
/* DO NOT use this to remove sNode or tNode */
/**
 * Removes a node: first drops it from the per-column index, then from the
 * graph itself (which also removes its incident edges).
 */
public void removeVertex(Node n){
    this.removeVertexFromNodeHashList(n);
    this.g.removeVertex(n);
}
//removes node from nodeHashList. We dont touch insertionNodeHashList
//because any node added on insertionNodeHashList must have weights.
private void removeVertexFromNodeHashList(Node n){
    // Integer.valueOf replaces the deprecated new Integer(...) constructor.
    this.nodeHashList.get(n.getColIndex()-1).remove(Integer.valueOf(n.getIBase()));
}
/** Sets the HLA gene name (e.g. "A", "B", "DRB1") used in output file names. */
public void setHLAGeneName(String geneName){
    this.HLAGeneName = geneName;
}
/**
 * Returns the reference allele — by convention the first sequence loaded
 * into this graph.
 */
public Sequence getRefAllele(){
    return this.alleles.get(0);
}
/**
 * Builds the HLA assembly graph from the given allele sequences.
 * The first sequence acts as the reference (see getRefAllele()); source and
 * sink nodes bracket the reference's column range. buildGraph() and
 * traverse() are invoked at the end to populate and walk the graph —
 * keep that order.
 */
public HLAGraph(ArrayList<Sequence> seqs){
//int numTypingExons = 1;
//if(this.isClassI())
//    numTypingExons = 2;
// Two-slot arrays tracking excess lengths at the head/tail of the typing
// region caused by bubbles before/after the exon boundaries.
this.headerExcessLengthBeyondTypingBoundary = new int[2];
this.tailExcessLengthBeyondTypingBoundary = new int[2];//numTypingExons];
this.alleles = seqs;
// Index every allele by name for O(1) lookup.
this.alleleHash = new HashMap<String, Sequence>();
for(int i=0;i<this.alleles.size();i++){
this.alleleHash.put(this.alleles.get(i).getAlleleName(), this.alleles.get(i));
}
//this.g = new SimpleDirectedWeightedGraph<Node, DefaultWeightedEdge>(DefaultWeightedEdge.class);
this.g = new SimpleDirectedWeightedGraph<Node, CustomWeightedEdge>(CustomWeightedEdge.class);
// Source at column 0, sink one past the reference allele's last column.
this.sNode = new Node('s', 0);
this.tNode = new Node('t', this.alleles.get(0).getColLength() + 1);
this.g.addVertex(sNode);
this.g.addVertex(tNode);
//this.nodeHashList = new ArrayList<HashMap<Character, Node>>();
this.nodeHashList = new ArrayList<HashMap<Integer, Node>>();
//this.insertionNodeHashList = new ArrayList<ArrayList<HashMap<Character, Node>>>();
this.insertionNodeHashList = new ArrayList<ArrayList<HashMap<Integer, Node>>>();
this.buildGraph();
this.traverse();
}
/*
* finds all s-t paths in this graph based on BFS technique.
* Should only be used for each bubble.
*
*/
public ArrayList<Path> findAllSTPath(Node s, Node t){
ArrayList<Path> results = new ArrayList<Path>();
Queue<Path> pathsQ = new LinkedList<Path>();
//Set<CustomeWeightedEdge> edges = this.g.outgoingEdgesOf(s);
Iterator<CustomWeightedEdge> itr = this.g.outgoingEdgesOf(s).iterator();
//first load all outing edges as paths in paths queue.
while(itr.hasNext()){
pathsQ.add(new Path(itr.next()));
}
Path firstPath = null;
//while we have paths to explore further in the queue
while((firstPath = pathsQ.poll())!=null){
//obtain the vertex at the end for this path
Node lastVertex = firstPath.getLastVertex(this.g);
//if the last vertex is t, then we add this path in the result
if(lastVertex.equals(t)){
results.add(firstPath);
}else{//otherwise, we need to explor the paths further
itr = this.g.outgoingEdgesOf(lastVertex).iterator();
while(itr.hasNext()){
Path tmpP = firstPath.deepCopy();
tmpP.appendEdge(itr.next());
pathsQ.add(tmpP);
}
}
}
return results;
}
public ArrayList<Path> findAllSTPathPruning(Node s, Node t){
ArrayList<Path> results = new ArrayList<Path>();
Queue<Path> pathsQ = new LinkedList<Path>();
Queue<CustomHashMap> readsetQ = new LinkedList<CustomHashMap>(); //we need to keep track of readset to prune branches based on the size
Iterator<CustomWeightedEdge> itr = this.g.outgoingEdgesOf(s).iterator();
//first load all outing edges as paths in paths queue.
while(itr.hasNext()){
//Path curP = new Path(itr.next());
CustomWeightedEdge curE = itr.next();
pathsQ.add(new Path(curE));
readsetQ.add(curE.getReadHashSet().clone());
}
Path firstPath = null;
CustomHashMap firstReadSet = null;
//while we have paths to explore further in the queue
while((firstPath = pathsQ.poll())!=null){
firstReadSet = readsetQ.poll();
//obtain the vertex at the end for this path
Node lastVertex = firstPath.getLastVertex(this.g);
//if the last vertex is t, then we add this path in the result
if(lastVertex.equals(t)){
results.add(firstPath);
}else{//otherwise, we need to explor the paths further
itr = this.g.outgoingEdgesOf(lastVertex).iterator();
while(itr.hasNext()){
Path tmpP = firstPath.deepCopy();
CustomHashMap tmpReadSet = firstReadSet.clone();
CustomWeightedEdge nextE = itr.next();
tmpReadSet.intersectionPE(nextE.getReadHashSet());
if(firstReadSet.size() > 0){ // we only add if intersection size is > 0. This greatly prunes paths that are needed to be explored.
tmpP.appendEdge(nextE);//itr.next());
pathsQ.add(tmpP);
readsetQ.add(tmpReadSet);
}
}
}
}
return results;
}
//modified so that if pre node is null, create curnode but dont' attempt to connect w/ an edge
private Node addMissingNode(char b, int colPos, Node cur, Node pre, boolean isRefStrand, byte qual, int readNum){
cur = new Node(b, colPos);
this.g.addVertex(cur);
//this.nodeHashList.get(colPos - 1).put(new Character(b), cur);
this.nodeHashList.get(colPos - 1).put(new Integer(Base.char2ibase(b)), cur);
if(pre != null){
//DefaultWeightedEdge e = this.g.addEdge(pre, cur);
this.addAndIncrement(pre, cur, isRefStrand, qual, readNum);
}//moved readHash to edges
/*else{
//cur.addRead(readNum);
//this.addReadToEdge()
}*/
return cur;
}
    /**
     * Creates the edge source->target, records the supporting read and its
     * base quality on it, initializes its weight to 0, then bumps the weight
     * via the edge's own incrementWeight(). Order matters: the edge must
     * exist and be zero-weighted before incrementing.
     */
    private void addAndIncrement(Node source, Node target, boolean isRefStrand, byte qual, int readNum){
	//target.addRead(readNum); //moved readHash to edges
	CustomWeightedEdge e = this.g.addEdge(source,target);
	e.addRead(readNum, qual);
	this.g.setEdgeWeight(e, 0.0d);
	e.incrementWeight(this.g, isRefStrand, qual);
    }
//private void incrementWeight(Node source, Node target){
private void incrementWeight(Node source, Node target, boolean isRefStrand, byte qual, int readNum){
//DefaultWeightedEdge e = g.getEdge(source, target);
//target.addRead(readNum);
CustomWeightedEdge e = g.getEdge(source, target);
if(e == null)
this.addAndIncrement(source,target, isRefStrand, qual, readNum);
else{
e.addRead(readNum, qual);
//target.addRead(readNum); //moved readHash to edges
e.incrementWeight(this.g, isRefStrand, qual);//g.setEdgeWeight(e, g.getEdgeWeight(e)+1);
}
}
    //readNum is a readIdentifier [int]
    /**
     * Threads one aligned read (SAMRecord) through the graph, incrementing
     * edge weights along its path and creating nodes/edges for bases the
     * reference MSA does not contain.
     *
     * CIGAR handling: S advances the read only; H is ignored; M walks
     * matched columns; D walks gap ('.') nodes; I goes into per-column
     * insertion space (insertionNodeHashList) when the MSA has no room,
     * or into regular gap columns when it does.
     *
     * "Hopping" skips MSA padding columns between consecutive read bases by
     * chaining through the gap node of each padded column.
     *
     * @param sr      aligned read.
     * @param readNum read identifier recorded on every touched edge.
     * @return number of CIGAR base operations processed (M/D/I counts).
     */
    public int addWeight(SAMRecord sr, int readNum){
	int numOp = 0;
	Cigar cigar = sr.getCigar();
	byte[] bases = sr.getReadBases(); //ASCII bytes ACGTN=.
	byte[] quals = sr.getBaseQualities();
	// floor base qualities at 2 so downstream error-prob math never sees 0/1
	for(int i=0; i<quals.length; i++){
	    if(quals[i] < 2)
		quals[i] = 2;
	}
	int baseIndex = 0;
	int refBasePos = sr.getAlignmentStart();
	Node prevnode = null;
	Node curnode = null;
	Base curbase = null;
	// map the alignment's reference coordinate to an MSA column
	Sequence curAllele = this.alleleHash.get(sr.getReferenceName());
	int colPos = curAllele.getColPosFromBasePos(refBasePos);
	boolean isRefStrand = !sr.getReadNegativeStrandFlag();
	/*
	System.err.println(sr.toString());
	System.err.println("start position:\t" + refBasePos);
	System.err.println("Mapped Allele:\t" + sr.getReferenceName());
	System.err.println("Allele Name:\t" + curAllele.getAlleleName());
	System.err.println("CIGAR:\t" + sr.getCigar());
	System.err.println("READ:\t" + sr.getReadString());
	System.err.println("READL:\t" + bases.length);
	System.err.println("ColPos:\t" + colPos);
	for(int i=0; i<bases.length; i++){
	    System.err.print(Base.char2ibase((char)bases[i]));
	}
	System.err.println();
	*/
	//curAllele.printPositions(colPos-1, bases.length);
	if(cigar==null) return 0;
	for(final CigarElement ce : cigar.getCigarElements()){
	    //System.err.println(ce.toString() + "\t" + ce.getLength());
	    CigarOperator op = ce.getOperator();
	    int cigarLen = ce.getLength();
	    switch(op)
		{
		case S :
		    {
			// soft clip: consume read bases, no graph traversal
			baseIndex += cigarLen;
			break;
		    }
		case H :
		    // hard clip: nothing consumed on either side
		    break;
		case M :
		    {
			for(int i=0; i<cigarLen; i++){
			    numOp++;
			    /* takes care of jumping over padding area (gaps) in MSA */
			    int tmpColPos = curAllele.getNextColPosForBase(colPos - 1) + 1;
			    if(tmpColPos > colPos){
				// hop through the gap node of each padded column
				for(int j=colPos;j<tmpColPos;j++){
				    HLA.HOPPING++;
				    curnode = this.nodeHashList.get(j-1).get(new Integer(Base.char2ibase('.')));
				    this.incrementWeight(prevnode,curnode,isRefStrand, quals[baseIndex-1], readNum);
				    prevnode=curnode;
				}
				colPos = tmpColPos;
			    }
			    curnode = this.nodeHashList.get(colPos -1).get(new Integer(Base.char2ibase((char)bases[baseIndex])));
			    /* if NO such node is found, we add new node and add edge from prevnode.
			       mismatch that is not covered by reference sequence */
			    if(curnode == null){
				HLA.NEW_NODE_ADDED++;
				curnode = this.addMissingNode((char)bases[baseIndex], colPos, curnode, prevnode, isRefStrand, quals[baseIndex], readNum);
				if(curnode == null)
				    System.err.println("IMPOSSIBLE: curnode NULL again after adding missing node!");
			    }
			    else if(prevnode != null)/* if prevnode is not set. firstBase*/
				this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex], readNum);
			    prevnode=curnode;
			    baseIndex++;
			    //refBasePos++;
			    colPos++;
			    //colPos = curAllele.getNextColPosForBase(colPos - 1) + 1;
			    //colPos++;
			}
			break;
		    }
		case D :
		    {
			for(int i=0; i<cigarLen; i++){
			    numOp++;
			    /* takes care of jumping over padding area (gaps) in MSA */
			    int tmpColPos = curAllele.getNextColPosForBase(colPos - 1) + 1;
			    if(tmpColPos > colPos){
				for(int j=colPos;j<tmpColPos;j++){
				    HLA.HOPPING++;
				    curnode = this.nodeHashList.get(j-1).get(new Integer(Base.char2ibase('.')));
				    this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex-1], readNum);
				    prevnode=curnode;
				}
				colPos = tmpColPos;
			    }
			    /* need to grab gap node at current column */
			    curnode = this.nodeHashList.get(colPos - 1).get(new Integer(Base.char2ibase('.')));
			    /* if NO such node is found, we add new node and add edge from prevnode */
			    if(curnode == null){
				HLA.NEW_NODE_ADDED++;
				curnode = this.addMissingNode('.', colPos, curnode, prevnode, isRefStrand, quals[baseIndex-1], readNum);
			    }else
				this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex-1], readNum);
			    prevnode=curnode;
			    //refBasePos++;
			    colPos++;
			}
			break;
		    }
		case I :
		    {
			// Need to check the colPos distance to nextBase in the allele to see if there are spaces for insertion.
			// If there are spaces for insertion, must insert into those spaces first then insert into insertionNodeHash.
			int insertionIndex = -1;
			for(int i=0; i<cigarLen; i++){
			    HLA.INSERTION++;
			    numOp++;
			    int tmpColPos = curAllele.getNextColPosForBase(colPos - 1) + 1;
			    if(tmpColPos == colPos){//then we must insert into insertionNodeHashList
				insertionIndex++;
				if(this.insertionNodeHashList.get(colPos - 1).size() > insertionIndex){
				    // reuse existing insertion slot at this offset
				    curnode = this.insertionNodeHashList.get(colPos - 1).get(insertionIndex).get(new Integer(Base.char2ibase((char)bases[baseIndex])));
				}else{//we need to add extra position (insertion length)
				    this.insertionNodeHashList.get(colPos - 1).add(new HashMap<Integer, Node>());
				    curnode = null;
				}
				if(curnode == null){
				    // first time this base appears at this insertion offset
				    curnode = new Node((char)bases[baseIndex], colPos);
				    HLA.INSERTION_NODE_ADDED++;
				    this.g.addVertex(curnode);
				    this.insertionNodeHashList.get(colPos - 1).get(insertionIndex).put(new Integer(Base.char2ibase((char)bases[baseIndex])), curnode);
				    this.addAndIncrement(prevnode, curnode, isRefStrand, quals[baseIndex], readNum);
				}else{
				    this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex], readNum);
				}
				prevnode = curnode;
				baseIndex++;
			    }else if(tmpColPos > colPos){//then we must insert here.
				// the MSA has padding columns: place the inserted base in a real column
				curnode = this.nodeHashList.get(colPos - 1).get(new Integer(Base.char2ibase((char)bases[baseIndex])));
				if(curnode == null){
				    HLA.NEW_NODE_ADDED++;
				    curnode = this.addMissingNode((char)bases[baseIndex], colPos, curnode, prevnode, isRefStrand, quals[baseIndex], readNum);
				    if(curnode == null){
					System.err.println("IMPOSSIBLE: curnode NULL again after adding missing node! (1)[addWeight]");
					System.exit(9);
				    }
				}else if(prevnode !=null){
				    HLA.INSERTION_WITH_NO_NEW_NODE++;
				    this.incrementWeight(prevnode, curnode, isRefStrand, quals[baseIndex], readNum);
				}else if(prevnode == null){
				    System.err.println("SHOULD NOT HAPPEND (2)[addWeight]");//can't start with insertion
				    System.exit(9);
				}
				prevnode = curnode;
				baseIndex++;
				colPos++;
				insertionIndex = -1;
			    }else{//should not happen.
				System.err.println("SHOULD NOT HAPPEND (3)[addWeight]");
				System.exit(9);
			    }
			}
			break;
		    }
		default: System.err.println("UNKNOWN CIGAROP:\t" + ce.toString());
		    break;
		}
	}
	return numOp;
    }
    /**
     * Constructs the base graph from the MSA: one node per (column, base)
     * pair observed in any allele, chained left-to-right, plus sNode/tNode
     * terminals. Edges from sNode and into tNode get Double.MAX_VALUE weight
     * so they never become flow bottlenecks; interior edges start at 0.
     */
    private void buildGraph(){
	int numAlleles = this.alleles.size();
	Sequence firstAllele = this.alleles.get(0);
	/* for each alleles*/
	//Node sNode = new Node('s', 0);
	//Node tNode = new Node('t', this.alleles.get(0).getColLength() + 1);
	//this.g.addVertex(sNode);
	//this.g.addVertex(tNode);
	for(int i=0; i<numAlleles; i++){
	    //System.err.println("allele " + i);
	    Sequence curSeq = this.alleles.get(i);
	    /* for each base in allele */
	    Node prevNode = sNode;
	    for(int j=0; j<curSeq.getColLength(); j++){
		//System.err.print("[" + j + "]");
		// column hashes are created once, while walking the first allele
		if(i==0){
		    this.nodeHashList.add(new HashMap<Integer, Node>());
		    this.insertionNodeHashList.add(new ArrayList<HashMap<Integer, Node>>());
		}
		HashMap<Integer, Node> curHash = nodeHashList.get(j);
		Integer curInt = new Integer(curSeq.baseAt(j).getIBase());
		Node tmpNode = curHash.get(curInt); //retrieve node
		if(tmpNode == null){ //if we have not added this node
		    tmpNode= new Node(curSeq.baseAt(j));
		    this.g.addVertex(tmpNode);
		    curHash.put(curInt,tmpNode);
		}
		//add an edge
		CustomWeightedEdge e;
		if(!this.g.containsEdge(prevNode, tmpNode)){
		    e = this.g.addEdge(prevNode,tmpNode);
		    if(prevNode.equals(sNode)){
			// source edges get max weight so they are never the bottleneck
			this.g.setEdgeWeight(e, Double.MAX_VALUE);
		    }else
			this.g.setEdgeWeight(e, 0.0d);
		}
		prevNode = tmpNode;
	    }
	    //add edge into the sink; max weight for the same reason as source edges
	    if(!this.g.containsEdge(prevNode, tNode))
		this.g.setEdgeWeight(this.g.addEdge(prevNode, tNode), Double.MAX_VALUE);
	}
    }
public void printStartEndNodeInfo(){
System.err.println(this.sNode.toString() + "|ind("+this.g.inDegreeOf(this.sNode) + ":outd(" + this.g.outDegreeOf(this.sNode ) + ")");
System.err.println(this.tNode.toString() + "|ind("+this.g.inDegreeOf(this.tNode) + ":outd(" + this.g.outDegreeOf(this.tNode ) + ")");
}
public double getTotalWeightForColumn(HashMap<Integer, Node> m, Node preNode){
double totalWeight = 0;
Node curNode = null;
for(int i=0;i<6;i++){
curNode = m.get(new Integer(i));
CustomWeightedEdge e = this.g.getEdge(preNode,curNode);
if(e!=null)
totalWeight += this.g.getEdgeWeight(e);
}
return totalWeight;
}
public ArrayList<int[]> obtainTypingIntervals(){
Sequence ref = this.alleles.get(0);
ArrayList<int[]> typingIntervals = new ArrayList<int[]>();
if(this.isClassI()){
/* typing exon 2 + intron + exon 3 */
/*
int[] tmp = new int[2];
tmp[0] = ref.getBoundaries()[3];
tmp[1] = ref.getBoundaries()[6];
typingIntervals.add(tmp);
*/
/* typing only exon 2 and 3 */
int[] tmp = new int[2];
tmp[0] = ref.getBoundaries()[3];
tmp[1] = ref.getBoundaries()[4];
typingIntervals.add(tmp);
int[] tmp2 = new int[2];
tmp2[0] = ref.getBoundaries()[5];
tmp2[1] = ref.getBoundaries()[6];
typingIntervals.add(tmp2);
}else if (this.isClassII()){
int[] tmp2 = new int[2];
tmp2[0] = ref.getBoundaries()[3];
tmp2[1] = ref.getBoundaries()[4];
typingIntervals.add(tmp2);
}
return typingIntervals;
}
public boolean traverseAndWeights(){
System.err.println("=========================");
System.err.println("= " + this.HLAGeneName);
System.err.println("=========================");
ArrayList<int[]> typingIntervals = this.obtainTypingIntervals();
Node preNode = null;
Node curNode = null;
for(int i=0; i<this.alleles.size(); i++){
preNode = null;
curNode = null;
Sequence curseq = this.alleles.get(i);
double exonSum = 0.0d;
double exonSump = 0.0d;
int exonNumZero = 0;
int noEdge = 0;
double exonFlow = Double.MAX_VALUE;
StringBuffer out = new StringBuffer();
out.append(curseq.getAlleleName() + "\n");
boolean intact = true;
eachallele:
for(int j=0; j<typingIntervals.size(); j++){
int start = typingIntervals.get(j)[0];
int end = typingIntervals.get(j)[1];
preNode = null;
out.append("\nNEXT_EXON\n");
//need to start a node before the exon start, hence -2, rather than -1 transformation from 1-based to 0-based index
//k should be 0-based. start and end are 1-based (inclusive, exclusive) index.
for(int k=start-2; k<end-1; k++){
char uchar = Character.toUpperCase(curseq.baseAt(k).getBase());
HashMap<Integer, Node> curHash = this.nodeHashList.get(k);
curNode = this.nodeHashList.get(k).get(new Integer(Base.char2ibase(uchar)));
if(curNode == null){
preNode = curNode;
intact = false;
break eachallele;
}
if(preNode != null){
CustomWeightedEdge e = this.g.getEdge(preNode, curNode);
if(e == null){
noEdge++;
out.append(uchar + "[NO_EDGE]->");
exonFlow = -1.0d;
//break;
}else{
double tmpw = this.g.getEdgeWeight(e);
double total = this.getTotalWeightForColumn(this.nodeHashList.get(j), preNode);
if(tmpw > 0.0d){
exonSum+=tmpw;
if(tmpw/total < 0.25d){
out.append(("(E)LOWPROB ->\t" + e.getGroupErrorProb() + "\t" + (tmpw/total)) + "\n");
}else{
exonSump+=tmpw;
}
}
if(tmpw == 0.0d)
exonNumZero++;
if(tmpw < exonFlow){
exonFlow = tmpw;
}
out.append(uchar + "[" + tmpw + "]->");
}
}
preNode = curNode;
}
}
if(intact){
out.append(("\n" + curseq.getAlleleName() + "\tNO_EDGE:\t" + noEdge +"\tE_SUM:\t" + exonSum + "\tE_ZERO:\t" + exonNumZero + "\tE_SUM_P\t" + exonSump + "\tMAXFLOW\t" + exonFlow + "\n"));
//out.append(("\n" + curseq.getAlleleName() + "\tSUM:\t" + sum + "\t#ZERO:\t" + numZero + "\tE_SUM:\t" + exonSum + "\tE_ZERO:\t" + exonNumZero + "\tSUM_P:\t" + sump + "\tE_SUM_P\t" + exonSump + "\tMAXFLOW\t" + exonFlow + "\n"));
System.err.println(out.toString());
}
}
return true;
}
public void traverse(){
System.err.println("Traversing (" + this.alleles.size() + ")");
Node preNode;// = this.sNode;
Node curNode;
for(int i=0; i<this.alleles.size(); i++){
this.alleles.get(i).verify();
preNode = this.sNode;
Sequence curseq = this.alleles.get(i);
for(int j=0; j<curseq.getColLength(); j++){
//System.err.println("Traversing [" + i + "," + j + "]");
char uchar = Character.toUpperCase(curseq.baseAt(j).getBase());
char lchar = Character.toUpperCase(curseq.baseAt(j).getBase());
//HashMap<Character, Node> curHash = this.nodeHashList.get(j);
HashMap<Integer, Node> curHash = this.nodeHashList.get(j);
//if(curHash.get(new Character(uchar)) != null){
if(curHash.get(new Integer(Base.char2ibase(uchar))) != null){
//System.err.println("NODE FOUND IN HASH[UPPER}");
//curNode = curHash.get(new Character(uchar));
curNode = curHash.get(new Integer(Base.char2ibase(uchar)));
/*
if(this.g.getEdge(preNode, curNode) == null)
System.err.println("\tWRONG, THIS SHOULD ALREADY BE IN THE GRAPH.\n" + "prevNode\t:" + preNode.toString() + "\tcurNode\t:" + curNode.toString());
else
System.err.println("Weight : " + this.g.getEdgeWeight(this.g.getEdge(preNode,curNode)));
*/
preNode = curNode;
//}else if(curHash.get(new Character(lchar)) != null){
}else if(curHash.get(new Integer(Base.char2ibase(lchar))) != null){
//System.err.println("NODE FOUND IN LOWER}");
//curNode = curHash.get(new Character(lchar));
curNode = curHash.get(new Integer(Base.char2ibase(lchar)));
/*
if(this.g.getEdge(preNode, curNode) == null)
System.err.println("\tWRONG, THIS SHOULD ALREADY BE IN THE GRAPH.");
else
System.err.println("Weight : " + this.g.getEdgeWeight(this.g.getEdge(preNode,curNode)));
*/
preNode = curNode;
}else{
;//System.err.println("NODE NOT FOUND IN THH GRAPH");
}
}
}
System.err.println("DONE Traversing");
}
public void updateEdgeWeightProb(){
Set<CustomWeightedEdge> eSet = g.edgeSet();
Iterator<CustomWeightedEdge> itr = eSet.iterator();
CustomWeightedEdge e = null;
while(itr.hasNext()){
e = itr.next();
e.computeGroupErrorProb();
//System.err.println(e.toString());
}
}
public boolean isClassI(){
if( this.HLAGeneName.equals("A")
|| this.HLAGeneName.equals("B")
|| this.HLAGeneName.equals("C")
){
return true;
}
return false;
}
public boolean isClassII(){
if( //this.HLAGeneName.equals("DPA1")
//|| this.HLAGeneName.equals("DPB1")
this.HLAGeneName.equals("DQA1")
|| this.HLAGeneName.equals("DQB1")
//|| this.HLAGeneName.equals("DRA")
|| this.HLAGeneName.equals("DRB1")
){
return true;
}
return false;
}
    /*
     * countBubbles() --> returns ArrayList of simple bubbles (NOT merged)
     * and sets interbubbleSequences in this class.
     *
     * processBubbles() --> merges bubbles by checking reads support information.
     */
    /** Entry point: finds simple bubbles, then merges/phases them; results are appended to rb. */
    public void countBubblesAndMerge(StringBuffer rb){
	this.resultBuffer = rb;
	this.processBubbles(this.countBubbles());
    }
public void processBubbles(ArrayList<Bubble> bubbles){
/* to load actual bubble sequence in each paths found in each bubble */
System.err.println("**************************");
System.err.println("Checking numBubbles: " + bubbles.size());
for(int i=0; i<bubbles.size(); i++){
if(bubbles.get(i).isFirstBubble()){
System.err.println("Bubble (" + i + "):\t[FB]" );
}
bubbles.get(i).initBubbleSequences();
}
/* superBubble is a merged bubbles. Ideally, you want to have just one bubble. */
ArrayList<Bubble> superBubbles = new ArrayList<Bubble>();
Bubble curSuperBubble = bubbles.get(0);
Bubble lastMergedBubble = curSuperBubble;
int lastSegregationColumnIndex = curSuperBubble.getStart().get(0);
System.err.println("(iteration 0):\t" + curSuperBubble.getNumPaths());
for(int i=1; i<bubbles.size(); i++){
System.err.println("\t(attempting merging)\t" + bubbles.get(i).getNumPaths());
bubbles.get(i).printBubbleSequence();
System.err.print("(SB)\t");
curSuperBubble.printBubbleSequenceSizes();
System.err.print("(OB)\t");
bubbles.get(i).printBubbleSequenceSizes();
//boolean phased = curSuperBubble.mergeBubble(bubbles.get(i));
MergeStatus ms = null;
if(!bubbles.get(i).isFirstBubble()){
ms = curSuperBubble.mergeBubble(bubbles.get(i), lastSegregationColumnIndex, this.isClassII(), lastMergedBubble);
lastMergedBubble = bubbles.get(i);
}
//if we are cutting here
if(bubbles.get(i).isFirstBubble() || ms.isSplit()){
if(bubbles.get(i).isFirstBubble())
System.out.println("NOT PHASING OVER DIFFERENT EXONS --> setting OB as curSuperBubble");
else
System.out.println("CANT PHASE --> setting OB as curSuperBubble.");
superBubbles.add(curSuperBubble);
curSuperBubble = bubbles.get(i);
lastMergedBubble = curSuperBubble;
//need to update segregationColumnIndex
lastSegregationColumnIndex = curSuperBubble.getStart().get(0);
}
//if not cutting
else{
//if we have a segreation, need to updated segregationColumnIndex
if(ms.isSegregating())
lastSegregationColumnIndex = ms.getLastSegregationColumnIndex();
System.err.println("**********************************");
curSuperBubble.printBubbleSequenceSizes();
System.err.println("**********************************");
curSuperBubble.printBubbleSequence();
}
System.err.println("(iteration " + i + "):\t" + curSuperBubble.getNumPaths());
}
superBubbles.add(curSuperBubble);
System.err.println("\n\n<
this.checkSuperBubbleLinkages(superBubbles);
//this.printBubbleResults(superBubbles, bubbles);
//this.compareInterBubbles(superBubbles);
ArrayList<ArrayList<AllelePath>> fracturedPaths = this.getFracturedPaths(superBubbles, bubbles);
this.allelePathPrintTest(fracturedPaths);//print test of fractured candidate. print super bubble sequences
this.allelePathToFastaFile(fracturedPaths);//writes superbubble sequences as fasta file
ArrayList<SuperAllelePath> superpaths = this.generateSuperAllelePaths(fracturedPaths);
this.superAllelePathToFastaFile(superpaths); //writes full length candidate allele concatenating super bubbles as fasta file
this.printScoreForMaxLikeliPair(superpaths, superBubbles);
this.pathAlign(superpaths); // aligns to DB for typing.
}
public void printScoreForMaxLikeliPair(ArrayList<SuperAllelePath> superpaths, ArrayList<Bubble> superBubbles){
for(int i = 0; i<superpaths.size(); i++){
for(int j=i; j<superpaths.size(); j++){
double[] scores = superpaths.get(i).getJointProbability(superpaths.get(j), superBubbles);
double[] jointWeightFlow = superpaths.get(i).jointTraverse(superpaths.get(j), this.g);
System.err.println("AllelePair [" + i + ":" + j + "]\t{ + "
+ scores[0] + "\t"
+ scores[1] + "\t"
+ scores[2]
+ "\tE_SUM:" + jointWeightFlow[0]
+ "\tMAXFLOW:" + jointWeightFlow[1]
+ "}");
}
}
int count = 0;
for(SuperAllelePath sap : superpaths){
double[] weightFlow = sap.traverse(this.g);
System.err.println("Superpath[" + count + "]\tE_SUM:" + weightFlow[0] + "\tMAXFLOW:" + weightFlow[1]);
count++;
}
}
    /**
     * Types each candidate super-path against the typing DB: first scans for
     * an exact sequence match; failing that, Needleman-Wunsch-aligns the
     * candidate against every DB sequence and keeps all ties for the longest
     * identical stretch. Matches are logged and appended to resultBuffer.
     */
    public void pathAlign(ArrayList<SuperAllelePath> superpaths){
	int count = 1;
	for(SuperAllelePath sap : superpaths){
	    String candidate = sap.getSequenceBuffer().toString();//p.toString(this.g, count);//, this.headerExcessLengthBeyondTypingBoundary, this.tailExcessLengthBeyondTypingBoundary);
	    count++;
	    String sapname = sap.toSimpleString();
	    String subject = null;
	    // best hits so far; parallel lists (maxName.get(i) goes with maxR.get(i))
	    ArrayList<String> maxName = new ArrayList<String>();
	    int maxIdenticalLen = 0;
	    ArrayList<Result> maxR =new ArrayList<Result>();
	    boolean foundPerfect = false;
	    // fast path: exact string equality avoids running the aligner at all
	    for(HLASequence subjscan : this.typingSequences){
		subject = subjscan.getSequence();
		if(candidate.equals(subject)){
		    Result curR = new Result(candidate.length(), subject);
		    maxR.add(curR);
		    maxName.add(subjscan.getGroup().getGroupString());
		    System.err.println("Found perfect match.");
		    foundPerfect = true;
		    break;
		}
	    }
	    if(!foundPerfect){
		// full NW alignment against every DB sequence; keep all ties at the max
		for(HLASequence subj : this.typingSequences){
		    subject = subj.getSequence();
		    Result curR = NWAlign.runDefault(candidate, subject);
		    if(curR.getIdenticalLen() >= maxIdenticalLen){
			if(curR.getIdenticalLen() > maxIdenticalLen){
			    // strictly better: reset the tie lists
			    maxName = new ArrayList<String>();
			    maxIdenticalLen = curR.getIdenticalLen();
			    maxR =new ArrayList<Result>();
			}
			maxName.add(subj.getGroup().getGroupString());
			maxR.add(curR);
		    }
		}
	    }
	    for(int i=0;i<maxR.size();i++){
		System.err.println("["+ sapname+ "]BEST MATCH:\t" + maxName.get(i) + "\t" + maxR.get(i).getIdenticalLen() + "\t" + maxR.get(i).getIdentity());
		this.resultBuffer.append(maxName.get(i) + "\t" + maxR.get(i).getIdenticalLen() + "\t" + maxR.get(i).getIdentity() + "\t" + maxR.get(i).getScore() + sapname + "\n");
	    }
	}
    }
/*
public void printBubbleResults(ArrayList<Bubble> superBubbles){
int startIndex = 0;
System.out.println("Printing\t" + superBubbles.size() + "\tfractured super bubbles.");
int count = 0;
for(Bubble sb : superBubbles){
System.out.println("\tSuperBubble\t" + count);
startIndex = sb.printResults(this.interBubbleSequences, startIndex);
count++;
}
}
*/
public void checkSuperBubbleLinkages(ArrayList<Bubble> superBubbles){
ArrayList<int[]>[] pLists = new ArrayList[(superBubbles.size()-1)*superBubbles.size()/2];
int count = 0;
/* for each superBubble*/
for(int i=0;i<superBubbles.size();i++){
Bubble sb_i = superBubbles.get(i);
/* pairing with another superBubble */
for(int j=i+1; j<superBubbles.size();j++){
Bubble sb_j = superBubbles.get(j);
//int[0]: path index for first bubble
//int[1]: path index for second bubble
//int[2]: number of reads supporting this phasing path
ArrayList<int[]> phasedList = sb_i.getPhasedSuperBubbles(sb_j);
pLists[count] = phasedList;
count++;
if(phasedList.size() > 0){
System.err.println("Phasing evidence FOUND between SB(" + i + ") : SB(" + j + ")" );
for(int[] index : phasedList)
System.err.println("SB(" + i + ")-" + index[0] + " : SB(" + j + ")-" + index[1]);
}else
System.err.println("NO phasing evidence between SB(" + i + ") : SB(" + j + ")" );
}
}
}
    /** Placeholder: intended to greedily pick a consistent linking across super bubbles from the phased-pair lists; not implemented yet. */
    public void selectGreedyForSuperBubbleLinking(ArrayList<int[]>[] phasedLists){
	//for(ArrayList<int[]>)
    }
/*
public void compareInterBubbles(ArrayList<Bubble> superBubbles){
//System.out.println(">>>>>>>>>>>>>>>> Checking interbubbles <<<<<<<<<<");
//for(int i=0; i<this.interBubbleSequences.size();i++){
// System.out.println("[I" + i + "]:\t" + this.interBubbleSequences.get(i).toString() + "\t" + this.interBubblePaths.get(i).toSimplePathString(this));
// }
int k = 0;
for(int i=0; i<superBubbles.size(); i++){
Bubble sb = superBubbles.get(i);
Path firstPath = sb.getPaths().get(0);
ArrayList<StringBuffer> bubbleSequences = firstPath.getBubbleSequences();
ArrayList<CustomWeightedEdge> orderedEdgeList = firstPath.getOrderedEdgeList();
int curEdgePos = 0;
int curMaxPos = 0;
for(int j=0; j<bubbleSequences.size(); j++){
System.out.println("[I" + k + "]:\t" + this.interBubbleSequences.get(k).toString() + "\t" + this.interBubblePaths.get(k).toSimplePathString(this));
k++;
System.out.print("[B:" + j +"]" + bubbleSequences.get(j).toString() + "\t");
curMaxPos += sb.getBubbleLengths().get(j).intValue();
for(;curEdgePos < curMaxPos; curEdgePos++){
System.out.print(this.g.getEdgeTarget(orderedEdgeList.get(curEdgePos)).getBase());
}
System.out.println();
}
}
}
*/
    /** Sets the output filename prefix used when writing typed/candidate FASTA files. */
    public void setFileName(String f){
	this.outputfilename = f;
    }
public ArrayList<DNAString> generateCandidates(ArrayList<ArrayList<DNAString>> fracturedSequences){
ArrayList<DNAString> sequences = new ArrayList<DNAString>();
for(DNAString ds : fracturedSequences.get(0)){
sequences.add(ds.deepCopy());
}
//for superBubble
for(int i=1; i<fracturedSequences.size(); i++){
ArrayList<DNAString> otherSequences = fracturedSequences.get(i);
ArrayList<DNAString> results = new ArrayList<DNAString>();
for(int j=0; j < sequences.size(); j++){
for(int k=0; k < otherSequences.size(); k++){
results.add(sequences.get(j).mergeDeep(otherSequences.get(k)));
}
}
sequences = results;
}
BufferedWriter bw = null;
try{
bw = new BufferedWriter(new FileWriter(this.outputfilename + "_" + this.HLAGeneName + ".typed.fa.candidates"));
for(DNAString seq : sequences)
bw.write(seq.toFasta().toString());
bw.close();
}catch(IOException ioe){
ioe.printStackTrace();
}
return sequences;
}
/*
public ArrayList<ArrayList<Path>> mergePathsOverSuperBubbles(ArrayList<Bubble> superBubbles){
int startIndex = 0;
int count = 0;
ArrayList<ArrayList<Path>> fracturedPaths = new ArrayList<ArrayList<Path>>();
for(Bubble sb : superBubbles){
ArrayList<Path> paths = new ArrayList<Path>();
fracturedPaths.add(paths);
}
}
*/
/*
public void printBubbleResults(ArrayList<Bubble> superBubbles, ArrayList<Bubble> bubbles){
//StringBuffer output = new StringBuffer();
int startIndex = 0;
System.out.println("Printing\t" + superBubbles.size() + "\tfractured super bubbles.");
//output.append(superBubbles.size() + "\tfractured SuperBubbles\n");
int count = 0;
//over each super bubble
ArrayList<ArrayList<DNAString>> fracturedSequences = new ArrayList<ArrayList<DNAString>>();
int bubbleOffset = 0;
Bubble pre = null;
for(Bubble sb : superBubbles){
if(pre != null){
bubbleOffset += pre.numBubbles();
}
ArrayList<DNAString> sequences = new ArrayList<DNAString>();
fracturedSequences.add(sequences);
System.out.println("\tSuperBubble\t" + count);
System.out.println("\t\tbubbleOffset:\t" + bubbleOffset);
startIndex = sb.printResults(this.interBubbleSequences, startIndex, sequences, this.HLAGeneName , count, bubbles, bubbleOffset);
count++;
pre = sb;
}
BufferedWriter bw = null;
try{
bw = new BufferedWriter(new FileWriter(this.outputfilename + "_" + this.HLAGeneName + ".typed.fa"));
for(ArrayList<DNAString> fseq : fracturedSequences){
for(DNAString ds : fseq)
bw.write(ds.toFasta().toString());
}
//bw.write(output.toString());
bw.close();
}catch(IOException ioe){
ioe.printStackTrace();
}
ArrayList<DNAString> candidateAlleles = this.generateCandidates(fracturedSequences);
//this.candidateAlign(candidateAlleles);
}
*/
public ArrayList<ArrayList<AllelePath>> getFracturedPaths(ArrayList<Bubble> superBubbles, ArrayList<Bubble> bubbles){
int startIndex = 0;
int count = 0;
System.out.println("Printing\t" + superBubbles.size() + "\tfractured super bubbles.");
//inner list holds paths found for one superBubble
//outer list holds multiple superBubbles
ArrayList<ArrayList<AllelePath>> fracturedPaths = new ArrayList<ArrayList<AllelePath>>();
int bubbleOffset = 0;
Bubble presb = null;
int sbIndex = 0;
for(Bubble sb : superBubbles){
if(presb != null){
bubbleOffset += presb.numBubbles();
}
ArrayList<AllelePath> paths = new ArrayList<AllelePath>();
fracturedPaths.add(paths);
//NEED TO ADD TRIM FUNCTIONALITY FOR HEADER AND TAIL BUBBLES!!! --> Trim function ADDED
startIndex = sb.mergePathsInSuperBubbles(this.interBubblePaths2, startIndex, paths, this.HLAGeneName, count, this.g, bubbles, bubbleOffset);
count++;
presb = sb;
}
return fracturedPaths;
//this.pathPrintTest(this.generateCandidatePaths(fracturedPaths));
//this.pathAlign(this.generateCandidatePaths(fracturedPaths));
}
public ArrayList<SuperAllelePath> generateSuperAllelePaths(ArrayList<ArrayList<AllelePath>> fracturedSequences){
ArrayList<SuperAllelePath> superpaths = new ArrayList<SuperAllelePath>();
//for(AllelePath ap : fracturedSequences.get(0))
for(int i=0; i<fracturedSequences.get(0).size();i++){
AllelePath ap = fracturedSequences.get(0).get(i);
superpaths.add(new SuperAllelePath(this.HLAGeneName));
superpaths.get(superpaths.size()-1).addAllelePath(ap, i);
}
for(int i=1; i<fracturedSequences.size(); i++){
ArrayList<AllelePath> nextSequences = fracturedSequences.get(i);
ArrayList<SuperAllelePath> results = new ArrayList<SuperAllelePath>();
for(int j=0; j<superpaths.size(); j++){
for(int k=0; k < nextSequences.size(); k++){
results.add(superpaths.get(j).clone());
results.get(results.size()-1).addAllelePath(nextSequences.get(k), k);
}
}
superpaths = results;
}
return superpaths;
}
public void allelePathPrintTest(ArrayList<ArrayList<AllelePath>> fracturedAllelePaths){
for(int i=0; i<fracturedAllelePaths.size(); i++){
ArrayList<AllelePath> paths = fracturedAllelePaths.get(i);
System.out.println("SUPER BUBBLE [" + i + "]");
for(int j=0; j<paths.size(); j++){
AllelePath ap = paths.get(j);
ap.printPath(this.g, i, j);
}
}
}
public void allelePathToFastaFile(ArrayList<ArrayList<AllelePath>> fracturedAllelePaths){
BufferedWriter bw = null;
try{
bw = new BufferedWriter(new FileWriter(this.outputfilename + "_" + this.HLAGeneName + ".typed.fa"));
for(ArrayList<AllelePath> faps : fracturedAllelePaths){
for(AllelePath ap : faps){
bw.write(ap.toFasta().toString());
}
//bw.close();
}
bw.close();
}catch(IOException ioe){
ioe.printStackTrace();
}
}
public void superAllelePathToFastaFile(ArrayList<SuperAllelePath> superAllelePaths){
BufferedWriter bw = null;
try{
bw = new BufferedWriter(new FileWriter(this.outputfilename + "_" + this.HLAGeneName + ".typed.fa.candiates"));
for(SuperAllelePath sap : superAllelePaths)
bw.write(sap.toFasta().toString());
bw.close();
}catch(IOException ioe){
ioe.printStackTrace();
}
}
/*
public void getFracturedPathsOLD(ArrayList<Bubble> superBubbles, int[] headerExcessArr, int[] tailExcessArr){
int startIndex = 0;
int count = 0;
//inner list holds paths found for one superBubble
//outer list holds multiple superBubbles
ArrayList<ArrayList<Path>> fracturedPaths = new ArrayList<ArrayList<Path>>();
Bubble presb = null;
ArrayList<Path> prePaths = null;
Bubble sb = null;
int firstBubbleCount = 0;
int headerExcess,tailExcess;
//for(sb : superBubbles){
for(int i=0;i<superBubbles.size(); i++){
sb = superBubbles.get(i);
ArrayList<Path> paths = new ArrayList<Path>();
fracturedPaths.add(paths);
startIndex = sb.mergePathsInSuperBubbles(this.interBubblePaths, startIndex, paths, this.HLAGeneName, count);
if(sb.isFirstBubble()){
headerExcess = headerExcessArr[firstBubbleCount];
tailExcess = (firstBubbleCount > 0 ? tailExcessArr[firstBubbleCount-1] : 0);
if(presb != null){
for(Path p : prePaths)
p.trimExcess(0, tailExcess);
}
for(Path p: paths)
p.trimExcess(headerExcess, 0);
firstBubbleCount++;
presb = sb;
prePaths = paths;
}
count++;
}
if(sb !=null && tailExcessArr[firstBubbleCount-1] > 0){
for(Path p : prePaths)
p.trimExcess(0, tailExcessArr[firstBubbleCount-1]);
}
//this.pathPrintTest(this.generateCandidatePaths(fracturedPaths));
this.pathAlign(this.generateCandidatePaths(fracturedPaths));
}
public void pathPrintTest(ArrayList<Path> ps){
int count = 1;
for(Path p : ps){
p.printPath(this.g, count);//, this.headerExcessLengthBeyondTypingBoundary, this.tailExcessLengthBeyondTypingBoundary);
count++;
}
}
*/
public void candidateAlign(ArrayList<DNAString> candidates){
int count = 1;
for(DNAString candidateDNA : candidates){
String candidate = candidateDNA.getSequence();
String subject = null;
String maxName = null;
String maxHit = null;
int maxIdenticalLen = 0;
Result maxR = null;
for(HLASequence subj : this.typingSequences){
subject = subj.getSequence();
Result curR = NWAlign.runDefault(candidate, subject);
/*if(subj.getGroup().getGroupString().equals("A*01:01:01G")){
System.err.println(candidate);
System.err.println(subject);
System.err.println("A*01:01:01G\t" + curR.toString());
}*/
if(curR.getIdenticalLen() >= maxIdenticalLen){
maxIdenticalLen = curR.getIdenticalLen();
maxName = subj.getGroup().getGroupString();
maxR = curR;
maxHit = subject;//curR.getHit();
if(curR.getIdentity() == 1.0d){
System.err.println("Found perfect match.");
break;
}
}
}
System.err.println("BEST MATCH:\t" + maxName + "\t" + maxIdenticalLen + "\t" + maxR.getIdentity());
System.err.println("Query:\n"+candidate);
System.err.println("Hit:\n"+maxHit);
this.resultBuffer.append(maxName + "\t" + maxIdenticalLen + "\t" + maxR.getIdentity() + "\t" + maxR.getScore() + "\n");
this.resultBuffer.append(maxR.toAlignmentString() + "\n");
}
}
/*
public void pathAlign(ArrayList<Path> ps){
int count = 1;
for(Path p : ps){
String candidate = p.toString(this.g, count);//, this.headerExcessLengthBeyondTypingBoundary, this.tailExcessLengthBeyondTypingBoundary);
count++;
String subject = null;
String maxName = null;
int maxIdenticalLen = 0;
Result maxR = null;
for(HLASequence subj : this.typingSequences){
subject = subj.getSequence();
Result curR = NWAlign.runDefault(candidate, subject);
if(curR.getIdenticalLen() >= maxIdenticalLen){
maxIdenticalLen = curR.getIdenticalLen();
maxName = subj.getGroup().getGroupString();
maxR = curR;
if(curR.getIdentity() == 1.0d){
System.err.println("Found perfect match.");
break;
}
}
}
System.err.println("BEST MATCH:\t" + maxName + "\t" + maxIdenticalLen + "\t" + maxR.getIdentity());
this.resultBuffer.append(maxName + "\t" + maxIdenticalLen + "\t" + maxR.getIdentity() + "\t" + maxR.getScore() + "\n");
this.resultBuffer.append(maxR.toAlignmentString() + "\n");
}
}
public ArrayList<Path> generateCandidatePaths(ArrayList<ArrayList<Path>> fracturedPaths){
ArrayList<Path> paths = new ArrayList<Path>();
//add paths of the first superBubble
for(Path p : fracturedPaths.get(0)){
paths.add(p.deepCopy());
}
//for each of next superBubble
for(int i=1; i<fracturedPaths.size(); i++){
ArrayList<Path> otherPaths = fracturedPaths.get(i);
ArrayList<Path> results = new ArrayList<Path>();
//for each current path
for(int j=0; j < paths.size(); j++){
//for each next option
for(int k=0; k < otherPaths.size(); k++){
results.add(paths.get(j).combinePaths(otherPaths.get(k)));
}
}
paths = results;
}
return paths;
}
public void selectBestHits(ArrayList<DNAString> candidates){
ArrayList<Integer> score = new ArrayList<Integer>();
for(DNAString seq:candidates){
score.add(findBestHit(seq));
}
}
public int findBestHit(DNAString seq){
int score = 0;
//run alignment
return score;
}
*/
public ArrayList<Bubble> countBubbles(){
System.err.println("=========================");
System.err.println("= " + this.HLAGeneName);
System.err.println("=========================");
ArrayList<Bubble> bubbles = new ArrayList<Bubble>();
ArrayList<int[]> typingIntervals = this.obtainTypingIntervals();
/* counters */
int numBubbles = 0;
int curBubbleLength = 1;
int lastStartOfBubble = 0;
//ArrayList<Integer> numPaths = new ArrayList<Integer>();
ArrayList<Integer> bubbleLengths = new ArrayList<Integer>(); // keeps track of bubble lengths. Bubble length is length excluding collapsing nodes. L-2
ArrayList<Integer> coordinates = new ArrayList<Integer>(); //keeps track of start coordinates of bubbles
/* counters */
Node curSNode = null;
this.interBubbleSequences = new ArrayList<StringBuffer>();
//this.interBubblePaths = new ArrayList<Path>();
this.interBubblePaths2 = new ArrayList<TmpPath>();
StringBuffer curbf = new StringBuffer("");
TmpPath tp = new TmpPath();
for(int i=0; i<typingIntervals.size(); i++){
int start = typingIntervals.get(i)[0];
int end = typingIntervals.get(i)[1];
curBubbleLength = 1;
lastStartOfBubble = start - 2;
//boolean headerBubble = false;
boolean firstBubble = true; // to demarcate the first bubble of the interval
//Node preNode = null;
int k;
/* FOR EACH POSITION in a TYPING INTERVAL*/
for(k=start-1;k<end-1;k++){
HashMap<Integer, Node> columnHash = this.nodeHashList.get(k);
Integer[] keys = columnHash.keySet().toArray(new Integer[0]);
/*it's a collapsing node if curBubbleLength > 2
else it's a possible start of bubble.*/
if(keys.length == 1){
//headerBubble = false;
/* then it must be a collapsing node; */
if(curBubbleLength > 1){
this.interBubbleSequences.add(curbf);
//this.interBubblePaths.add(tp.toPath(this.g));
this.interBubblePaths2.add(tp);
//this.interBubblePaths.add(curP);
curBubbleLength++;
numBubbles++;
//numPaths.add(new Integer(this.analyzeBubble(lastStartOfBubble, k)));
bubbleLengths.add(new Integer(curBubbleLength-2));
coordinates.add(new Integer(lastStartOfBubble));
if(firstBubble){
//if(i>0)//if it's not first interval, we need to update last bubble
// bubbles.get(bubbles.size()-1).trimPaths(0,this.tailExcessLengthBeyondTypingBoundary[i-1]);
bubbles.add(new Bubble(this, curSNode, columnHash.get(keys[0]), firstBubble, this.headerExcessLengthBeyondTypingBoundary[i], 0));
//bubbles.get(bubbles.size()-1).trimPath(this.headerExcessLengthBeyongTypingBoundary[i], 0);
firstBubble = false;
}else
bubbles.add(new Bubble(this, curSNode, columnHash.get(keys[0])));
curSNode = columnHash.get(keys[0]);
//preNode = curSNode;
lastStartOfBubble = k;
curBubbleLength = 1;
//curP = new Path();
curbf = new StringBuffer("");
curbf.append(curSNode.getBase());
tp = new TmpPath();
tp.appendNode(curSNode);
}
/* Possible Start of a Bubble or straight path */
else{
curSNode = columnHash.get(keys[0]);
curbf.append(curSNode.getBase());
tp.appendNode(curSNode);
/*if(prNode == null)
preNode = curSNode;
else{
curP.appendEdge(this.g.getEdge(preNode, curSNode));
preNode = curSNode;
}*/
lastStartOfBubble = k;
curBubbleLength = 1;
}
}else if(keys.length > 1){//middle of bubble
/* NEED TO FIX THIS TO ALLOW BUBBLE TO BE USED at the boundaries*/
if(k==(start-1)){// || headerBubble){
System.err.println("[k] = " + k);
int tmpBubbleLength = 1;
for(int l=start-2;;l
System.err.println("trying new k: [k] = " + l);
tmpBubbleLength++;
HashMap<Integer, Node> tmpHash = this.nodeHashList.get(l);
Integer[] tmpKeys = tmpHash.keySet().toArray(new Integer[0]);
if(tmpKeys.length == 1){
System.err.println("Found the new start!");
curSNode = tmpHash.get(tmpKeys[0]);
curbf.append(curSNode.getBase());// this is actually unecessary
//curbf=new StringBuffer("");
tp.appendNode(curSNode);
lastStartOfBubble = l;
curBubbleLength = tmpBubbleLength;
this.headerExcessLengthBeyondTypingBoundary[i] = curBubbleLength - 1;
System.err.println("Setting Trimming length(header):\t" + this.headerExcessLengthBeyondTypingBoundary[i]);
break;
}
}
//this.interBubbleSequences.add(new StringBuffer(""));
//headerBubble = true;
/*curSNode = columnHash.get(keys[0]);
curbf.append(curSNode.getBase());
tp.appendNode(curSNode);
lastStartOfBubble = k;
curBubbleLength = 1;
*/
}else{ //mid-bubble: just increment bubble length
curBubbleLength++;
//preNode = null;
}
}else{//disconnected graph.
System.err.println("This should NOT HAPPEN");
}
}
//need to update here to handle "End-Bubble" (bubble sitting at the end and not concluded)
if(curBubbleLength > 1){
System.err.println(">>>>>>>Bubble at the end:\t[curBubbleLength]:"+ curBubbleLength);
int preLength = curBubbleLength;
for(;;k++){
HashMap<Integer, Node> columnHash = this.nodeHashList.get(k);
Integer[] keys = columnHash.keySet().toArray(new Integer[0]);
curBubbleLength++;
if(keys.length == 1){
this.interBubbleSequences.add(curbf);
//this.interBubblePaths.add(tp.toPath(this.g));
this.interBubblePaths2.add(tp);
System.err.println("Found the new end!");
numBubbles++;
bubbleLengths.add(new Integer(curBubbleLength-2));
coordinates.add(new Integer(lastStartOfBubble));
//if(firstBubble){
// bubbles.add(new Bubble(this, curSNode, columnHash.get(keys[0]), firstBubble));
// firstBubble = false;
//}else
this.tailExcessLengthBeyondTypingBoundary[i] = curBubbleLength - preLength;
System.err.println("Setting Trimming length(tail):\t" + this.tailExcessLengthBeyondTypingBoundary[i]);
bubbles.add(new Bubble(this, curSNode, columnHash.get(keys[0]), false, 0, this.tailExcessLengthBeyondTypingBoundary[i]));
curSNode = columnHash.get(keys[0]);
lastStartOfBubble = k;
curBubbleLength = 1;
curbf = new StringBuffer("");
curbf.append(curSNode.getBase());
tp = new TmpPath();
tp.appendNode(curSNode);
break;
}
}
}//else{
this.interBubbleSequences.add(curbf);
//this.interBubblePaths.add(tp.toPath(this.g));
this.interBubblePaths2.add(tp);
curbf = new StringBuffer("");
tp = new TmpPath();
/*
this.interBubbleSequences.add(curbf);
this.interBubblePaths.add(tp.toPath(this.g));
curbf = new StringBuffer("");
tp = new TmpPath();
if(curBubbleLength > 1){
System.err.println(">>>>>>>Bubble at the end:\t[curBubbleLength]:"+ curBubbleLength);
}
*/
}
System.err.println("NumBubbles:\t" + numBubbles + "\tfound");
for(int i=0; i<bubbleLengths.size(); i++){
System.err.print(bubbleLengths.get(i).intValue() + "\t");
}
System.err.println();
for(int i=0; i<bubbleLengths.size(); i++){
System.err.print(coordinates.get(i).intValue() + "\t");
}
System.err.println();
return bubbles;
}
//write code to find number of paths and
//return the number of paths in the bubble.
//move column-wise and update number of paths going through each vertex.
/*
private int analyzeBubble(int start, int end){
Integer[] keys = this.nodeHashList.get(start).keySet().toArray(new Integer[0]);
for(int i=start+1; i<=end; i++){
//HashMap<Integer, Node> columnHash = this.nodeHashList.get(i);
//Integer[] keys = columnHash.keySet().toArray(new Integer[0]);
this.updateNumPathFwd(i-1, i);
}
return 0;
}*/
//update numPathFwd in current column
/*
private void updateNumPathFwd(int pre, int cur){
Collection<Node> preNodes = this.nodeHashList.get(pre).values();
Collection<Node> curNodes = this.nodeHashList.get(cur).values();
Iterator<Node> curItr = curNodes.iterator();
while(curItr.hasNext()){
Node curNode = curItr.next();
Iterator<Node> preItr = preNodes.iterator();
while(preItr.hasNext()){
Node preNode = preItr.next();
if(this.g.getEdge(preNode, curNode) != null){
curNode.incrementNumPathInBubbleFwd(preNode.getNumInBubbleFwd());
}
}
}
}
*/
    /**
     * Debug/statistics pass: walks the columns (either the typing exons only,
     * or the whole graph) counting bubbles and estimating the number of paths
     * through each bubble. Results are printed to stderr; nothing is stored.
     *
     * @param typingExonOnly if true, restrict the scan to the typing exons
     *                       (exons 2+3 for class I, exon 2 for class II) using
     *                       the boundary indices of the first known allele
     */
    public void countBubbles(boolean typingExonOnly){
        int startIndex, endIndex;
        if(typingExonOnly){
            int[] boundaries = this.alleles.get(0).getBoundaries();
            if(this.alleles.get(0).isClassI()){//if class I : type exon 2 and 3
                startIndex = boundaries[3];
                endIndex = boundaries[6];
            }else{// if class II : type exon 2
                startIndex = boundaries[3];
                endIndex = boundaries[4];
            }
        }else{
            startIndex = 0;
            endIndex = this.nodeHashList.size();
            // NOTE(review): with endIndex == size(), the loop below runs i <= endIndex
            // and calls nodeHashList.get(endIndex) -- looks like an off-by-one that
            // would throw IndexOutOfBoundsException; confirm intended bound.
        }
        int numBubbles = 0;
        Node sNode = new Node(4, startIndex); // synthetic start node (gap base, key 4)
        ArrayList<Node> preNodes = new ArrayList<Node>();
        preNodes.add(sNode);
        boolean preStart = true;
        int bubbleSize = 1;
        int numPath = 1;
        for(int i = startIndex; i <= endIndex; i++){
            HashMap<Integer, Node> curHash = this.nodeHashList.get(i);
            //Set<Integer> keyset = curHash.keySet();
            Integer[] keys = curHash.keySet().toArray(new Integer[0]);
            if(keys.length == 1){//only one option --> it's a collapsing node or part of a straight path
                if(bubbleSize > 1){//if bubbleSize > 1, then this closes a bubble
                    numBubbles++;
                    System.err.println("Bubble[" + numBubbles + "]:Size(" + bubbleSize + "):numPath(" + numPath + ")" );
                    preNodes = new ArrayList<Node>();
                    preNodes.add(curHash.get(keys[0]));
                    preStart = false;
                    bubbleSize = 1;
                    numPath = 1;
                }else{
                    // straight path: reset the previous-column node list
                    // NOTE(review): the current column's node is not added here,
                    // unlike the bubble-closing branch -- confirm intended.
                    preNodes = new ArrayList<Node>();
                    preStart = false;
                }
            }else if(keys.length > 1){
                //checking edges from previous-column nodes to this column's nodes
                for(int p=0; p < preNodes.size(); p++){
                    Node pNode = preNodes.get(p);
                    int branching=0;
                    for(int q=0; q<keys.length; q++){
                        Node qNode = curHash.get(keys[q]);
                        CustomWeightedEdge e = this.g.getEdge(pNode, qNode);
                        if(e != null && this.g.getEdgeWeight(e) > 0)
                            branching++;
                    }
                    if(branching > 2){
                        if(preStart){
                            numPath += (branching - 1);
                        }else{
                            int ind = this.g.inDegreeOf(pNode);
                            numPath += ind*branching - ind;
                        }
                    }
                }
            }
        }
    }
//insertionNodes are indexed at same position as endColumns
//meaning: insertionNodes should be inserted in between startColumns and endColumns.
public void flattenInsertionNodes(){
ArrayList<int[]> typingIntervals = this.obtainTypingIntervals();
int fCount = 0;
for(int i=typingIntervals.size()-1; i>-1; i
int start = typingIntervals.get(i)[0];
int end = typingIntervals.get(i)[1];
for(int j=end-1; j >= start; j
int insSize = this.insertionNodeHashList.get(j).size();
//there is insertion, we need to flatten.
if(insSize > 0 && this.isThereConnectionToInsertionNodes(insSize, j)){
fCount++;
this.shiftColumnsByInsertionSize(insSize, j);
}
}
}
System.err.println(this.HLAGeneName + "\t>>>>> FLATTENED InsertionBubble:\t" + fCount );
}
//fromColumnIndex is 0-based columnIndex
    /**
     * Checks whether the pending insertion nodes at main column
     * {@code fromColumnIndex} (0-based) are wired into the main graph on BOTH
     * sides: an edge from the preceding main column into the FIRST insertion
     * column, and an edge from the LAST insertion column back into the main
     * column. Only then is it safe to splice the insertion columns in
     * (see shiftColumnsByInsertionSize).
     *
     * @param insSize         number of pending insertion columns at this position
     * @param fromColumnIndex 0-based main-column index where the insertion sits
     * @return true only if both the entering and the leaving connection exist
     */
    private boolean isThereConnectionToInsertionNodes(int insSize, int fromColumnIndex){
        System.err.println("[isThereConnection] Checking at fromColumnIndex : " + fromColumnIndex + "\tInsSize: " + insSize);
        HashMap<Integer, Node> startNodes = nodeHashList.get(fromColumnIndex-1);
        boolean sConnection = false;
        boolean eConnection = false;
        HashMap<Integer, Node> sInsHash = this.insertionNodeHashList.get(fromColumnIndex).get(0);
        HashMap<Integer, Node> eInsHash = this.insertionNodeHashList.get(fromColumnIndex).get(insSize - 1);
        HashMap<Integer, Node> endNodes = nodeHashList.get(fromColumnIndex);
        System.out.println("[isThereConnectionToInsertionNodes] HashIndex: " + (fromColumnIndex - 1) );
        sConnection = this.isThereConnection(startNodes, sInsHash);
        eConnection = this.isThereConnection(eInsHash, endNodes);
        if(sConnection || eConnection){
            // verbose diagnostics: report which of the two sides is connected
            if(sConnection)
                System.err.println("[isThereConnection] connection between startNodes and sInsHash found!");
            else
                System.err.println("[isThereConnection] NO connection between startNodes and sInsHash found!");
            if(eConnection)
                System.err.println("[isThereConnection] connection between eInsHash and endNodes found!");
            else
                System.err.println("[isThereConnection] NO connection between eInsHash and endNodes found!");
        }
        return sConnection && eConnection;
    }
//just to check if there edges between s and t
private boolean isThereConnection(HashMap<Integer, Node> s, HashMap<Integer, Node> t){
Integer[] sKeys = new Integer[0];
sKeys = s.keySet().toArray(sKeys);
Integer[] eKeys = new Integer[0];
eKeys = t.keySet().toArray(eKeys);
for(int i=0;i<sKeys.length; i++){
if(sKeys[i].intValue() != 4){
for(int j=0; j<eKeys.length; j++){
//System.err.print("eKyes[j] intval\t");
//System.err.println(eKeys[j].intValue());
if(eKeys[j].intValue() != 4){
//System.out.println("Actual index value in node: " + s.get(sKeys[i]).getColIndex());
CustomWeightedEdge e = this.g.getEdge(s.get(sKeys[i]), t.get(eKeys[j]));
if(e != null)
return true;
}
}
}
}
return false;
}
/* fromColumnIndex is 0-based index --> this is where insertion happens */
/* 0based(List index): 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 */
/* 1based(CI in Node and Base): 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 */
/* from ColumnIndex at 5, insSize of 2*/
    /**
     * Splices {@code insSize} pending insertion columns into nodeHashList at
     * main column {@code fromColumnIndex} (0-based), creating one gap ('.')
     * node per inserted column, chaining the gap nodes with edges, padding all
     * known alleles with blanks, renumbering every shifted column, and finally
     * re-routing all edges that ran directly between the two flanking main
     * columns through the new gap nodes (getWeightSumsBetween2Columns).
     * Heavy stderr logging is for debugging only.
     *
     * @param insSize         number of insertion columns to splice in
     * @param fromColumnIndex 0-based main-column index of the insertion point
     */
    private void shiftColumnsByInsertionSize(int insSize, int fromColumnIndex){
        HashMap<Integer, Node> startNodes = nodeHashList.get(fromColumnIndex-1);
        HashMap<Integer, Node> endNodes = nodeHashList.get(fromColumnIndex);
        Iterator<Integer> itr_s = startNodes.keySet().iterator();
        System.err.println("\n**STARTNODES:");
        while(itr_s.hasNext()){
            System.err.println(startNodes.get(itr_s.next()).toString());
        }
        Iterator<Integer> itr_e = endNodes.keySet().iterator();
        System.err.println("\n**ENDNODES:");
        while(itr_e.hasNext()){
            System.err.println(endNodes.get(itr_e.next()).toString());
        }
        Node pre = null;
        Node[] gapNodes = new Node[insSize];
        /* here we first shift endNodes and all nodes after that by insSize
         * to acquire insSize many column space for insertionNodeHashList.
         */
        for(int i=0; i<insSize;i++){
            HashMap<Integer, Node> insHash_i = this.insertionNodeHashList.get(fromColumnIndex).get(i);
            this.adjustColumnIndex(insHash_i, fromColumnIndex + i + 1);//1-base column position
            nodeHashList.add(fromColumnIndex+i, insHash_i); //insert insHash_i
            Node cur = new Node('.', fromColumnIndex + i + 1); // 1-base column position
            this.addVertex(cur);//add vertex and add it to nodeHashList;
            if(pre !=null)
                this.g.addEdge(pre,cur); // chain consecutive gap nodes
            gapNodes[i] = cur;
            pre = cur;
        }
        System.err.println("checking edges between gapNodes[]");
        for(int i=1;i<gapNodes.length;i++){
            CustomWeightedEdge e = this.g.getEdge(gapNodes[i-1], gapNodes[i]);
            if(e == null)
                System.err.println("No edges found between between gapNodes["+(i-1) + "] and gapNodes[" + i + "]");
        }
        /* adding spaces to Alleles as well */
        for(int i=0; i<this.alleles.size(); i++)
            this.alleles.get(i).insertBlanks(fromColumnIndex, insSize);
        /* we shift all columns after insertion, so updating all columnIndex */
        for(int i=fromColumnIndex+insSize; i<this.nodeHashList.size(); i++)
            this.adjustColumnIndex(this.nodeHashList.get(i), i+1);//need to updated with 1-base column position
        /* remove all edges between start node and end nodes and re-route them through gap nodes by adding new edges and assign weights and readset accordingly*/
        double weightSum = this.getWeightSumsBetween2Columns(startNodes, endNodes, gapNodes);
        /* DEBUGGING prints*/
        itr_s = startNodes.keySet().iterator();
        System.err.println("\n**STARTNODES:");
        while(itr_s.hasNext()){
            System.err.println(startNodes.get(itr_s.next()).toString());
        }
        System.err.println("**CONNECTED NODES TO START-GAP:");
        // NOTE(review): toArray(new CustomWeightedEdge[1]) yields a single null
        // element when the edge set is empty -- getEdgeSource(e) would then NPE;
        // presumably the gap node always has incoming edges here. Confirm.
        CustomWeightedEdge[] inEdges = this.g.incomingEdgesOf(gapNodes[0]).toArray(new CustomWeightedEdge[1]);
        for(CustomWeightedEdge e : inEdges){
            System.err.println(this.g.getEdgeSource(e).toString());
        }
        itr_e = endNodes.keySet().iterator();
        System.err.println("\n**ENDNODES:");
        while(itr_e.hasNext()){
            System.err.println(endNodes.get(itr_e.next()).toString());
        }
        System.err.println("**CONNECTED NODES TO END-GAP:");
        CustomWeightedEdge[] outEdges = this.g.outgoingEdgesOf(gapNodes[gapNodes.length -1]).toArray(new CustomWeightedEdge[1]);
        for(CustomWeightedEdge e : outEdges){
            System.err.println(this.g.getEdgeTarget(e).toString());
        }
    }
    /**
     * LEGACY variant of shiftColumnsByInsertionSize (appears superseded; note
     * the differing off-by-one index bases vs. the current version). Kept as-is.
     * Splices insertion columns into nodeHashList, pads alleles with blanks,
     * renumbers shifted columns, and re-routes flanking edges through gap nodes.
     *
     * @param insSize         number of insertion columns to splice in
     * @param fromColumnIndex insertion point (indexing differs from the
     *                        current version -- presumably 1-based here; confirm)
     */
    private void shiftColumnsByInsertionSizeOLD(int insSize, int fromColumnIndex){
        HashMap<Integer, Node> startNodes = nodeHashList.get(fromColumnIndex-2);
        HashMap<Integer, Node> endNodes = nodeHashList.get(fromColumnIndex-1);
        //we need to insert <insSize>-many columns first
        Node pre = null;
        Node[] gapNodes = new Node[insSize];
        ArrayList<Base> insBases = new ArrayList<Base>();//new Base[insSize];
        //insert insSize-many columns with gapNodes and transfer insertionNodes to nodeHashList.
        for(int i=0; i<insSize; i++){
            //add a space first then add the vertex --> gets the space(HashMap) from insertionNodeHashList
            HashMap<Integer, Node> insHash_i = this.insertionNodeHashList.get(fromColumnIndex-1).get(i);
            this.adjustColumnIndex(insHash_i, fromColumnIndex + i);//this.adjustColumnIndex(insHash_i, fromColumnIndex + i + 1);
            nodeHashList.add(fromColumnIndex + i, insHash_i);
            Node cur = new Node('.', fromColumnIndex + i + 1);
            this.addVertex(cur);//add vertex and add to nodeHashList
            if(pre != null)
                this.g.addEdge(pre, cur);
            gapNodes[i] = cur;
            pre = cur;
            insBases.add(new Base('-', 0,0,0,true,1));
        }
        /* adding spaces to Alleles as well*/
        for(int i=0; i<this.alleles.size(); i++){
            //this.alleles.get(i).insertBlanks(fromColumnIndex, insBases);
            this.alleles.get(i).insertBlanks(fromColumnIndex-1, insSize);
        }
        /*
        //insert insSize-many columns with gapNodes
        for(int i=0; i<insSize; i++){
            //add a space first then add the vertex
            nodeHashList.add(fromColumnIndex + i, new HashMap<Integer, Node>);
            Node cur = new Node('.', fromColumnIndex + i + 1);
            this.addVertex(cur);//add vertex and add to nodeHashList
            if(pre != null)
                this.g.addEdge(pre, cur);
            gapNodes[i] = cur;
            pre = cur;
        }
        */
        //NEED TO SHIFT all columns after insertion, so updating all columnIndex (originalIndex+insSize.
        for(int i=fromColumnIndex+insSize; i<this.nodeHashList.size(); i++)
            this.adjustColumnIndex(this.nodeHashList.get(i), i);//this.adjustColumnIndex(i);
        //remove all edges between start nodes and end nodes and add new edges connecting through gap nodes.
        double weightSum = this.getWeightSumsBetween2Columns(startNodes, endNodes, gapNodes);
        if(insSize > 1){
            for(int i=fromColumnIndex; i<fromColumnIndex+insSize-1; i++){
                gapNodes = Arrays.copyOfRange(gapNodes, 1, gapNodes.length);
                this.getWeightSumsBetween2Columns(this.nodeHashList.get(i), endNodes, gapNodes);
            }
        }
    }
    //removes all edges between start nodes and end nodes
//connect edges to newly added gap nodes with correct weights
    /**
     * Removes every direct edge between the {@code start} and {@code end}
     * columns and re-routes the traffic through the chain of newly inserted
     * {@code gapNodes}: per-base outgoing weight/score/read-set totals are
     * re-attached as start-&gt;firstGap edges, per-base incoming totals as
     * lastGap-&gt;end edges, and the grand total weight plus the union read
     * set is applied along the gap-to-gap chain.
     *
     * Arrays are sized 6 to cover every base code (A/C/G/T plus gap/other --
     * presumably matching the Node base encoding; confirm).
     *
     * @param start    node map of the column preceding the insertion
     * @param end      node map of the column following the insertion
     * @param gapNodes pre-chained gap nodes spanning the insertion
     * @return the total weight of all removed (re-routed) edges
     */
    private double getWeightSumsBetween2Columns(HashMap<Integer, Node> start, HashMap<Integer, Node> end, Node[] gapNodes){
        Node sGap = gapNodes[0];
        Node eGap = gapNodes[gapNodes.length-1];
        double[] outweight = new double[6]; /* for each nucleotide */
        ArrayList<ArrayList<Byte>> outFScore = new ArrayList<ArrayList<Byte>>();
        ArrayList<ArrayList<Byte>> outRScore = new ArrayList<ArrayList<Byte>>();
        double[] inweight = new double[6];
        ArrayList<ArrayList<Byte>> inFScore = new ArrayList<ArrayList<Byte>>();
        ArrayList<ArrayList<Byte>> inRScore = new ArrayList<ArrayList<Byte>>();
        ArrayList<CustomHashMap> outRHash = new ArrayList<CustomHashMap>();
        ArrayList<CustomHashMap> inRHash = new ArrayList<CustomHashMap>();
        //one accumulator slot per base code
        for(int i=0; i<6; i++){
            outFScore.add(new ArrayList<Byte>());
            outRScore.add(new ArrayList<Byte>());
            inFScore.add(new ArrayList<Byte>());
            inRScore.add(new ArrayList<Byte>());
            outRHash.add(new CustomHashMap());
            inRHash.add(new CustomHashMap());
        }
        double sum = 0.0d;
        CustomHashMap rHashForGapNodes = new CustomHashMap();//union of all re-routed read sets
        Integer[] sKeys = new Integer[0];
        Integer[] eKeys = new Integer[0];
        sKeys = start.keySet().toArray(sKeys);
        eKeys = end.keySet().toArray(eKeys);
        boolean[] sEdgePresent = new boolean[6];
        boolean[] eEdgePresent = new boolean[6];
        boolean isThereConnection = false;
        //check all edges between startNodes and endNodes and sum up base-wise.
        for(int i=0; i < sKeys.length; i++){
            int sVal = sKeys[i].intValue();
            Node stNode = start.get(sKeys[i]);
            for(int j=0; j < eKeys.length; j++){
                int eVal = eKeys[j].intValue();
                Node eNode = end.get(eKeys[j]);
                CustomWeightedEdge e = this.g.getEdge(stNode, eNode);
                if(e != null){
                    // accumulate this edge's weight/scores/reads into the
                    // per-source-base and per-target-base buckets, then drop it
                    sEdgePresent[sVal] = true;
                    eEdgePresent[eVal] = true;
                    isThereConnection = true;
                    double w = this.g.getEdgeWeight(e);
                    outweight[sVal] += w;
                    outFScore.get(sVal).addAll(e.getFScores());
                    outRScore.get(sVal).addAll(e.getRScores());
                    inweight[eVal] += w;
                    inFScore.get(eVal).addAll(e.getFScores());
                    inRScore.get(eVal).addAll(e.getRScores());
                    outRHash.get(sVal).addAll(e.getReadHashSet());
                    inRHash.get(eVal).addAll(e.getReadHashSet());
                    rHashForGapNodes.addAll(e.getReadHashSet());
                    sum += w;
                    this.g.removeEdge(e);
                }
            }
        }
        //we only need to add edges if there were edges between start and end
        if(isThereConnection){
            //setting outgoing edges from start nodes to newly added gapNode( sGap ).
            for(int i=0; i<sKeys.length; i++){
                if(sEdgePresent[sKeys[i].intValue()]){
                    Node stNode = start.get(sKeys[i]);
                    CustomWeightedEdge e = this.g.getEdge(stNode, sGap);
                    if(e == null){
                        e = this.g.addEdge(stNode, sGap);
                        this.g.setEdgeWeight(e, 0.0d);
                    }
                    this.g.setEdgeWeight(e, this.g.getEdgeWeight(e) + outweight[sKeys[i].intValue()]);//accumulate onto any existing edge
                    e.addAllReadsFrom(outRHash.get(sKeys[i].intValue()));
                    e.addAllFScores(outFScore.get(sKeys[i].intValue()));
                    e.addAllRScores(outRScore.get(sKeys[i].intValue()));
                }
            }
            //setting incoming edges from newly added gapNode( eGap ) to end nodes.
            for(int i=0; i<eKeys.length; i++){
                if(eEdgePresent[eKeys[i].intValue()]){
                    Node eNode = end.get(eKeys[i]);
                    CustomWeightedEdge e = this.g.getEdge(eGap, eNode);
                    if(e == null){
                        e = this.g.addEdge(eGap, eNode);
                        this.g.setEdgeWeight(e, 0.0d);
                    }
                    this.g.setEdgeWeight(e, this.g.getEdgeWeight(e) + inweight[eKeys[i].intValue()]);
                    e.addAllReadsFrom(inRHash.get(eKeys[i].intValue()));
                    e.addAllFScores(inFScore.get(eKeys[i].intValue()));
                    e.addAllRScores(inRScore.get(eKeys[i].intValue()));
                }
            }
            //set edgeWeight between newly inserted gap nodes.
            //and add read identifiers to gapNodes
            for(int i=0; i<gapNodes.length; i++){
                if(i>0){
                    CustomWeightedEdge e = this.g.getEdge(gapNodes[i-1], gapNodes[i]);
                    this.g.setEdgeWeight(e, this.g.getEdgeWeight(e) + sum);//entire re-routed weight flows along the gap chain
                    e.addAllReadsFrom(rHashForGapNodes);
                }
            }
        }
        return sum;
    }
//set columnIndex to newIndex.
/*
private void adjustColumnIndex(int newIndex){
HashMap<Integer, Node> curHash = this.nodeHashList.get(newIndex);
Iterator<Integer> keys = curHash.keySet().iterator();
while(keys.hasNext())
curHash.get(keys.next()).setColIndex(newIndex);
}
*/
private void adjustColumnIndex(HashMap<Integer, Node> hash, int newIndex){
Iterator<Integer> keys = hash.keySet().iterator();
while(keys.hasNext())
hash.get(keys.next()).setColIndex(newIndex);
}
    /** Prunes the graph: drops low-weight edges first, then isolated vertices. */
    public void removeUnused(){
        this.removeUnusedEdges();
        this.removeUnusedVertices();
    }
/* remove low frequency edges */
private void removeUnusedEdges(){
Iterator<CustomWeightedEdge> itr = this.g.edgeSet().iterator();
CustomWeightedEdge e = null;
ArrayList<CustomWeightedEdge> removalList = new ArrayList<CustomWeightedEdge>();
while(itr.hasNext()){
e = itr.next();
if(this.g.getEdgeWeight(e) < 1.0d){
removalList.add(e);//this.g.removeEdge(e);
}
}
System.err.println(this.HLAGeneName +"\t:removed\t" + removalList.size() + "\tEdges." );
for(int i=0; i<removalList.size(); i++){
this.g.removeEdge(removalList.get(i));
}
}
/* remove island vertices */
private void removeUnusedVertices(){
Iterator<Node> itr = this.g.vertexSet().iterator();
Node n = null;
ArrayList<Node> removalList = new ArrayList<Node>();
while(itr.hasNext()){
n = itr.next();
//we dont remove sNode and tNode
if(!n.equals(this.sNode) && !n.equals(this.tNode)){
if(this.g.inDegreeOf(n) ==0 && this.g.outDegreeOf(n) == 0){//this.g.degreeOf(n) < 1){
removalList.add(n);
//this.removeVertexFromNodeHashList(n);
//this.g.removeVertex(n);
}
}
}
System.err.println(this.HLAGeneName +"\t:removed\t" + removalList.size() + "\tVertices." );
for(int i=0; i<removalList.size(); i++){
//System.err.println("\t" + removalList.get(i).toString());
this.removeVertex(removalList.get(i));
//this.removeVertexFromNodeHashList(removalList.get(i));
//this.g.removeVertex(removalList.get(i));
}
}
//removing stems. (unreachable stems and dead-end stems)
/* remove any stems */
    /**
     * Removes linear "stems" from the graph: dead-end stems (chains ending in
     * a node with out-degree 0, walked backwards via the single incoming edge)
     * and unreachable stems (chains starting at a node with in-degree 0,
     * walked forwards via the single outgoing edge). Each stem node is removed
     * as the walk proceeds; removed nodes are tracked in dNodes so the outer
     * scan does not revisit them. sNode/tNode are never removed.
     * Stderr output is diagnostic only.
     */
    public void removeStems(){
        ArrayList<int[]> typingIntervals = this.obtainTypingIntervals();
        //Set<Node> vSet = this.g.vertexSet();
        Node[] nodes = this.g.vertexSet().toArray(new Node[0]);//snapshot: we mutate the graph below
        HashSet<Node> dNodes = new HashSet<Node>();
        Node n = null;
        int terminalStem = 0;
        int unreachableStem = 0;
        for(int i=0; i<nodes.length; i++){
            n = nodes[i];
            if(!n.equals(this.sNode) && !n.equals(this.tNode) && !dNodes.contains(n)){
                // dead-end stem: walk backwards from a sink with a single parent
                if(this.g.outDegreeOf(n) == 0 && this.g.inDegreeOf(n) == 1){
                    int stemSize = 0;
                    terminalStem++;
                    Node curNode = n;
                    while(true){
                        if(!this.alleles.get(0).withinTypingRegion(curNode, typingIntervals))
                            ;//System.err.println("NOT IN TYPING INTERVAL!!");
                        else
                            System.err.print("YES! IN TYPING INTERVAL!!");
                        stemSize++;
                        CustomWeightedEdge e = this.g.incomingEdgesOf(curNode).toArray(new CustomWeightedEdge[1])[0];
                        System.err.print("\t" + this.g.getEdgeWeight(e));
                        Node nextNode = this.g.getEdgeSource(e);
                        dNodes.add(curNode);
                        this.removeVertex(curNode);
                        // continue only while the parent is itself a stem node
                        if(this.g.outDegreeOf(nextNode) == 0 && this.g.inDegreeOf(nextNode) == 1)
                            curNode = nextNode;
                        else
                            break;
                    }
                    System.err.println("[DE]stemSize:\t" + stemSize);
                }
                // unreachable stem: walk forwards from a source with a single child
                else if(this.g.outDegreeOf(n) == 1 && this.g.inDegreeOf(n) == 0){
                    int stemSize = 0;
                    unreachableStem++;
                    Node curNode = n;
                    while(true){
                        if(!this.alleles.get(0).withinTypingRegion(curNode, typingIntervals))
                            ;//System.err.println("NOT IN TYPING INTERVAL!!");
                        else
                            System.err.println("YES! IN TYPING INTERVAL!!");
                        stemSize++;
                        CustomWeightedEdge e = this.g.outgoingEdgesOf(curNode).toArray(new CustomWeightedEdge[1])[0];
                        System.err.print("\t" + this.g.getEdgeWeight(e));
                        Node nextNode = this.g.getEdgeTarget(e);
                        dNodes.add(curNode);
                        this.removeVertex(curNode);
                        // continue only while the child is itself a stem node
                        if(this.g.outDegreeOf(nextNode) == 1 && this.g.inDegreeOf(nextNode) == 0)
                            curNode = nextNode;
                        else
                            break;
                    }
                    System.err.println("[UN]stemSize:\t" + stemSize);
                }
            }
        }
        System.err.println(this.HLAGeneName + "\t:removed\t[DE]:" + terminalStem + "\t[UN]:" + unreachableStem + "\t[NumVertices]:" + dNodes.size());
    }
public void countStems(){
Iterator<Node> itr = this.g.vertexSet().iterator();
Node n = null;
int terminalType = 0;
int startType = 0;
while(itr.hasNext()){
n = itr.next();
if(!n.equals(this.sNode) && !n.equals(this.tNode)){
if(this.g.inDegreeOf(n) == 1 && this.g.outDegreeOf(n) == 0){
terminalType++;
}else if(this.g.inDegreeOf(n) == 0 && this.g.outDegreeOf(n) == 1){
startType++;
System.err.println("startType:\t" + n.toString());
}
}
}
System.err.println("Stems\t" + terminalType + "\t" + startType);
}
/*
private void initNumPathForColumn(HashMap){
}*/
}
|
package org.hbase.async.test;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collection;
import org.jboss.netty.logging.InternalLoggerFactory;
import org.jboss.netty.logging.Slf4JLoggerFactory;
import org.slf4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.Description;
import org.junit.runner.JUnitCore;
import org.junit.runner.Request;
import org.junit.runner.Result;
import org.junit.runner.notification.Failure;
import org.junit.runner.notification.RunListener;
import org.powermock.reflect.Whitebox;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import com.stumbleupon.async.Callback;
import com.stumbleupon.async.Deferred;
import com.stumbleupon.async.DeferredGroupException;
import org.hbase.async.AtomicIncrementRequest;
import org.hbase.async.Bytes;
import org.hbase.async.ColumnPrefixFilter;
import org.hbase.async.ColumnRangeFilter;
import org.hbase.async.DeleteRequest;
import org.hbase.async.FilterList;
import org.hbase.async.GetRequest;
import org.hbase.async.HBaseClient;
import org.hbase.async.KeyRegexpFilter;
import org.hbase.async.KeyValue;
import org.hbase.async.NoSuchColumnFamilyException;
import org.hbase.async.PutRequest;
import org.hbase.async.ScanFilter;
import org.hbase.async.Scanner;
import org.hbase.async.TableNotFoundException;
import org.hbase.async.test.Common;
/**
* Basic integration and regression tests for asynchbase.
*
* Requires a locally running HBase cluster.
*/
final public class TestIntegration {
  private static final Logger LOG = Common.logger(TestIntegration.class);
  // Short interval: buffered edits are flushed almost immediately.
  private static final short FAST_FLUSH = 10; // milliseconds
  // Long interval: tests control flushing explicitly via flush().
  private static final short SLOW_FLUSH = 1000; // milliseconds
  /** Path to HBase home so we can run the HBase shell. */
  private static final String HBASE_HOME;
  // Resolve and validate HBASE_HOME at class-load time; failing fast here
  // gives a clearer error than a failed shell invocation mid-test.
  static {
    HBASE_HOME = System.getenv("HBASE_HOME");
    if (HBASE_HOME == null) {
      throw new RuntimeException("Please set the HBASE_HOME environment"
        + " variable.");
    }
    final File dir = new File(HBASE_HOME);
    if (!dir.isDirectory()) {
      throw new RuntimeException("No such directory: " + HBASE_HOME);
    }
  }
  /** Whether or not to truncate existing tables during tests. */
  private static final boolean TRUNCATE =
    System.getenv("TEST_NO_TRUNCATE") == null;
  // Test table / family names and raw CLI args, assigned once in main().
  private static String table;
  private static String family;
  private static String[] args;
  // Per-test client: created in setUp(), shut down in tearDown().
  private HBaseClient client;
public static void main(final String[] args) throws Exception {
preFlightTest(args);
table = args[0];
family = args[1];
TestIntegration.args = args;
LOG.info("Starting integration tests");
final JUnitCore junit = new JUnitCore();
final JunitListener listener = new JunitListener();
junit.addListener(listener);
final String singleTest = System.getenv("TEST_NAME");
final Request req;
if (singleTest != null) {
req = Request.method(TestIntegration.class, singleTest);
} else {
req = Request.aClass(TestIntegration.class);
}
final Result result = junit.run(req);
LOG.info("Ran " + result.getRunCount() + " tests in "
+ result.getRunTime() + "ms");
if (!result.wasSuccessful()) {
LOG.error(result.getFailureCount() + " tests failed: "
+ result.getFailures());
System.exit(1);
}
LOG.info("All tests passed!");
}
  /** Creates a fresh HBase client before each test. */
  @Before
  public void setUp() {
    client = Common.getOpt(TestIntegration.class, args);
  }
  /** Cleanly shuts the client down after each test, releasing its sockets. */
  @After
  public void tearDown() throws Exception {
    client.shutdown().join();
  }
/** Ensures the table/family we use for our test exists. */
private static void preFlightTest(final String[] args) throws Exception {
final HBaseClient client = Common.getOpt(TestIncrementCoalescing.class,
args);
try {
createOrTruncateTable(client, args[0], args[1]);
} finally {
client.shutdown().join();
}
}
  /** Creates or truncates the given table name. */
  // If the table or family is missing, create it and recurse once to
  // re-verify; the recursion terminates because creation either succeeds
  // or throws.
  private static void createOrTruncateTable(final HBaseClient client,
                                            final String table,
                                            final String family)
    throws Exception {
    try {
      client.ensureTableFamilyExists(table, family).join();
      truncateTable(table);
    } catch (TableNotFoundException e) {
      createTable(table, family);
      createOrTruncateTable(client, table, family); // Check again.
    }
  }
/** Write a single thing to HBase and read it back. */
@Test
public void putRead() throws Exception {
client.setFlushInterval(FAST_FLUSH);
final double write_time = System.currentTimeMillis();
final PutRequest put = new PutRequest(table, "k", family, "q", "val");
final GetRequest get = new GetRequest(table, "k", family, "q");
client.put(put).join();
final ArrayList<KeyValue> kvs = client.get(get).join();
assertSizeIs(1, kvs);
final KeyValue kv = kvs.get(0);
assertEq("k", kv.key());
assertEq(family, kv.family());
assertEq("q", kv.qualifier());
assertEq("val", kv.value());
final double kvts = kv.timestamp();
assertEquals(write_time, kvts, 5000.0); // Within five seconds.
}
/** Write a single thing to HBase and read it back, delete it, read it. */
@Test
public void putReadDeleteRead() throws Exception {
client.setFlushInterval(FAST_FLUSH);
final PutRequest put = new PutRequest(table, "k", family, "q", "val");
final GetRequest get = new GetRequest(table, "k", family, "q");
client.put(put).join();
final ArrayList<KeyValue> kvs = client.get(get).join();
assertSizeIs(1, kvs);
assertEq("val", kvs.get(0).value());
final DeleteRequest del = new DeleteRequest(table, "k", family, "q");
client.delete(del).join();
final ArrayList<KeyValue> kvs2 = client.get(get).join();
assertSizeIs(0, kvs2);
}
  /**
   * Write two values to a HBase column and read them back,
   * delete one, and read back the other.
   */
  @Test
  public void putReadDeleteAtTimestamp() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    byte[] t = table.getBytes();
    byte[] k = "kprd@ts".getBytes();
    byte[] f = family.getBytes();
    // Make the qualifier unique to avoid running into HBASE-9879.
    byte[] q = ("q" + System.currentTimeMillis()
                + "-" + System.nanoTime()).getBytes();
    byte[] v1 = "val1".getBytes();
    byte[] v2 = "val2".getBytes();
    // Two versions of the same cell at explicit timestamps 100 and 200.
    final PutRequest put1 = new PutRequest(t, k, f, q, v1, 100L);
    final PutRequest put2 = new PutRequest(t, k, f, q, v2, 200L);
    client.put(put1).join();
    client.put(put2).join();
    final GetRequest get = new GetRequest(t, k, f, q).maxVersions(2);
    final ArrayList<KeyValue> kvs = client.get(get).join();
    assertSizeIs(2, kvs);
    // Versions come back newest-first.
    assertEq("val2", kvs.get(0).value());
    assertEq("val1", kvs.get(1).value());
    // Delete only the version at timestamp 200, leaving 100 intact.
    final DeleteRequest del = new DeleteRequest(t, k, f, q, 200L);
    del.setDeleteAtTimestampOnly(true);
    client.delete(del).join();
    final ArrayList<KeyValue> kvs2 = client.get(get).join();
    assertSizeIs(1, kvs2);
    assertEq("val1", kvs2.get(0).value());
  }
/** Basic scan test. */
@Test
public void basicScan() throws Exception {
client.setFlushInterval(FAST_FLUSH);
final PutRequest put1 = new PutRequest(table, "s1", family, "q", "v1");
final PutRequest put2 = new PutRequest(table, "s2", family, "q", "v2");
final PutRequest put3 = new PutRequest(table, "s3", family, "q", "v3");
Deferred.group(client.put(put1), client.put(put2),
client.put(put3)).join();
// Scan the same 3 rows created above twice.
for (int i = 0; i < 2; i++) {
LOG.info("------------ iteration
final Scanner scanner = client.newScanner(table);
scanner.setStartKey("s0");
scanner.setStopKey("s9");
// Callback class to keep scanning recursively.
class cb implements Callback<Object, ArrayList<ArrayList<KeyValue>>> {
private int n = 0;
public Object call(final ArrayList<ArrayList<KeyValue>> rows) {
if (rows == null) {
return null;
}
n++;
try {
assertSizeIs(1, rows);
final ArrayList<KeyValue> kvs = rows.get(0);
final KeyValue kv = kvs.get(0);
assertSizeIs(1, kvs);
assertEq("s" + n, kv.key());
assertEq("q", kv.qualifier());
assertEq("v" + n, kv.value());
return scanner.nextRows(1).addCallback(this);
} catch (AssertionError e) {
// Deferred doesn't catch Errors on purpose, so transform any
// assertion failure into an Exception.
throw new RuntimeException("Asynchronous failure", e);
}
}
}
try {
scanner.nextRows(1).addCallback(new cb()).join();
} finally {
scanner.close().join();
}
}
}
/** Scan with multiple qualifiers. */
@Test
public void scanWithQualifiers() throws Exception {
client.setFlushInterval(FAST_FLUSH);
final PutRequest put1 = new PutRequest(table, "k", family, "a", "val1");
final PutRequest put2 = new PutRequest(table, "k", family, "b", "val2");
final PutRequest put3 = new PutRequest(table, "k", family, "c", "val3");
Deferred.group(client.put(put1), client.put(put2),
client.put(put3)).join();
final Scanner scanner = client.newScanner(table);
scanner.setFamily(family);
scanner.setQualifiers(new byte[][] { { 'a' }, { 'c' } });
final ArrayList<ArrayList<KeyValue>> rows = scanner.nextRows(2).join();
assertSizeIs(1, rows);
final ArrayList<KeyValue> kvs = rows.get(0);
assertSizeIs(2, kvs);
assertEq("val1", kvs.get(0).value());
assertEq("val3", kvs.get(1).value());
}
  /** Write a few KVs and delete them in one batch */
  @Test
  public void multiDelete() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    final PutRequest put2 = new PutRequest(table, "mdk1", family, "q2", "val2");
    client.put(put2).join();
    final PutRequest put3 = new PutRequest(table, "mdk2", family, "q3", "val3");
    client.put(put3).join();
    final PutRequest put1 = new PutRequest(table, "mdk1", family, "q1", "val1");
    client.put(put1).join();
    // Issue all three deletes concurrently so they can be batched together.
    final DeleteRequest del2 = new DeleteRequest(table, "mdk1", family, "q2");
    final DeleteRequest del3 = new DeleteRequest(table, "mdk2", family, "q3");
    final DeleteRequest del1 = new DeleteRequest(table, "mdk1", family, "q1");
    Deferred.group(client.delete(del2), client.delete(del3),
                   client.delete(del1)).join();
    // Both rows must now be completely empty.
    GetRequest get = new GetRequest(table, "mdk1");
    ArrayList<KeyValue> kvs = client.get(get).join();
    assertSizeIs(0, kvs);
    get = new GetRequest(table, "mdk2");
    kvs = client.get(get).join();
    assertSizeIs(0, kvs);
  }
  /** Write a few KVs in different regions and delete them in one batch */
  @Test
  public void multiRegionMultiDelete() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    // Two separate tables so the batched deletes span different regions.
    final String table1 = args[0] + "1";
    final String table2 = args[0] + "2";
    createOrTruncateTable(client, table1, family);
    createOrTruncateTable(client, table2, family);
    final PutRequest put2 = new PutRequest(table1, "mdk1", family, "q2", "val2");
    client.put(put2).join();
    final PutRequest put3 = new PutRequest(table1, "mdk2", family, "q3", "val3");
    client.put(put3).join();
    final PutRequest put1 = new PutRequest(table2, "mdk1", family, "q1", "val1");
    client.put(put1).join();
    // Delete everything in one concurrent batch across both tables.
    final DeleteRequest del2 = new DeleteRequest(table1, "mdk1", family, "q2");
    final DeleteRequest del3 = new DeleteRequest(table1, "mdk2", family, "q3");
    final DeleteRequest del1 = new DeleteRequest(table2, "mdk1", family, "q1");
    Deferred.group(client.delete(del2), client.delete(del3),
                   client.delete(del1)).join();
    // All three rows must now be empty.
    GetRequest get = new GetRequest(table1, "mdk1");
    ArrayList<KeyValue> kvs = client.get(get).join();
    assertSizeIs(0, kvs);
    get = new GetRequest(table1, "mdk2");
    kvs = client.get(get).join();
    assertSizeIs(0, kvs);
    get = new GetRequest(table2, "mdk1");
    kvs = client.get(get).join();
    assertSizeIs(0, kvs);
  }
/** Attempt to write a column family that doesn't exist. */
@Test
public void putNonexistentFamily() throws Exception {
client.setFlushInterval(FAST_FLUSH);
final PutRequest put = new PutRequest(table, "k", family + family,
"q", "val");
try {
client.put(put).join();
} catch (NoSuchColumnFamilyException e) {
assertEquals(put, e.getFailedRpc());
return;
}
throw new AssertionError("Should never be here");
}
  /** Send a bunch of edits with one that references a non-existent family. */
  // Verifies partial-failure semantics: the two good edits must still land
  // even though the middle one fails.
  @Test
  public void multiPutWithOneBadRpcInBatch() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    final PutRequest put1 = new PutRequest(table, "mk1", family, "m1", "mpb1");
    // The following edit is destined to a non-existent family.
    final PutRequest put2 = new PutRequest(table, "mk2", family + family,
                                           "m2", "mpb2");
    final PutRequest put3 = new PutRequest(table, "mk3", family, "m3", "mpb3");
    try {
      final ArrayList<Deferred<Object>> ds = new ArrayList<Deferred<Object>>(3);
      ds.add(client.put(put1));
      ds.add(client.put(put2));
      ds.add(client.put(put3));
      Deferred.groupInOrder(ds).join();
    } catch (DeferredGroupException e) {
      // Results are in submission order, so index 1 is the bad edit.
      final ArrayList<Object> results = e.results();
      final Object res2 = results.get(1);
      if (!(res2 instanceof NoSuchColumnFamilyException)) {
        throw new AssertionError("res2 wasn't a NoSuchColumnFamilyException: "
                                 + res2);
      }
      assertEquals(put2, ((NoSuchColumnFamilyException) res2).getFailedRpc());
      // The two good edits must have been applied regardless.
      final GetRequest get1 = new GetRequest(table, "mk1", family, "m1");
      ArrayList<KeyValue> kvs = client.get(get1).join();
      assertSizeIs(1, kvs);
      assertEq("mpb1", kvs.get(0).value());
      final GetRequest get2 = new GetRequest(table, "mk2", family, "m2");
      assertSizeIs(0, client.get(get2).join());
      final GetRequest get3 = new GetRequest(table, "mk3", family, "m3");
      kvs = client.get(get3).join();
      assertSizeIs(1, kvs);
      assertEq("mpb3", kvs.get(0).value());
      return;
    }
    throw new AssertionError("Should never be here");
  }
  /** Lots of buffered counter increments from multiple threads. */
  // Hammers bufferAtomicIncrement() from several threads and then checks
  // that no increment was lost or double-counted after a flush.
  @Test
  public void bufferedIncrementStressTest() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    final byte[] table = TestIntegration.table.getBytes();
    final byte[] key1 = "cnt1".getBytes(); // Spread the increments..
    final byte[] key2 = "cnt2".getBytes(); // .. over these two counters.
    final byte[] family = TestIntegration.family.getBytes();
    final byte[] qual = { 'q' };
    // Reset both counters before the run.
    final DeleteRequest del1 = new DeleteRequest(table, key1, family, qual);
    final DeleteRequest del2 = new DeleteRequest(table, key2, family, qual);
    Deferred.group(client.delete(del1), client.delete(del2)).join();
    final int nthreads = Runtime.getRuntime().availableProcessors() * 2;
    // The magic number comes from the limit on callbacks that Deferred
    // imposes. We spread increments over two counters, hence the x 2.
    final int incr_per_thread = 8192 / nthreads * 2;
    final boolean[] successes = new boolean[nthreads];
    final class IncrementThread extends Thread {
      private final int num;  // Thread index into `successes`.
      public IncrementThread(final int num) {
        super("IncrementThread-" + num);
        this.num = num;
      }
      public void run() {
        try {
          doIncrements();
          successes[num] = true;
        } catch (Throwable e) {
          successes[num] = false;
          LOG.error("Uncaught exception", e);
        }
      }
      private void doIncrements() {
        for (int i = 0; i < incr_per_thread; i++) {
          // Alternate between the two counters.
          final byte[] key = i % 2 == 0 ? key1 : key2;
          bufferIncrement(table, key, family, qual, 1);
        }
      }
    }
    final IncrementThread[] threads = new IncrementThread[nthreads];
    for (int i = 0; i < nthreads; i++) {
      threads[i] = new IncrementThread(i);
    }
    LOG.info("Starting to generate increments");
    for (int i = 0; i < nthreads; i++) {
      threads[i].start();
    }
    // Wait for every worker to finish before flushing.
    for (int i = 0; i < nthreads; i++) {
      threads[i].join();
    }
    LOG.info("Flushing all buffered increments.");
    client.flush().joinUninterruptibly();
    LOG.info("Done flushing all buffered increments.");
    // Check that we the counters have the expected value.
    final GetRequest[] gets = { mkGet(table, key1, family, qual),
                                mkGet(table, key2, family, qual) };
    for (final GetRequest get : gets) {
      final ArrayList<KeyValue> kvs = client.get(get).join();
      assertSizeIs(1, kvs);
      assertEquals(incr_per_thread * nthreads / 2,
                   Bytes.getLong(kvs.get(0).value()));
    }
    for (int i = 0; i < nthreads; i++) {
      assertEquals(true, successes[i]); // Make sure no exception escaped.
    }
  }
  /** Increment coalescing with values too large to be coalesced. */
  @Test
  public void incrementCoalescingWithAmountsTooBig() throws Exception {
    // Slow flush so coalescing (or lack thereof) is what we measure.
    client.setFlushInterval(SLOW_FLUSH);
    final byte[] table = TestIntegration.table.getBytes();
    final byte[] key = "cnt".getBytes();
    final byte[] family = TestIntegration.family.getBytes();
    final byte[] qual = { 'q' };
    final DeleteRequest del = new DeleteRequest(table, key, family, qual);
    del.setBufferable(false);
    client.delete(del).join();
    final long big = 1L << 48; // Too big to be coalesced.
    // Issue two large increments concurrently, then read the counter back.
    final ArrayList<KeyValue> kvs = Deferred.group(
      bufferIncrement(table, key, family, qual, big),
      bufferIncrement(table, key, family, qual, big)
    ).addCallbackDeferring(new Callback<Deferred<ArrayList<KeyValue>>,
                           ArrayList<Long>>() {
      public Deferred<ArrayList<KeyValue>> call(final ArrayList<Long> incs) {
        final GetRequest get = new GetRequest(table, key)
          .family(family).qualifier(qual);
        return client.get(get);
      }
    }).join();
    assertSizeIs(1, kvs);
    assertEquals(big + big, Bytes.getLong(kvs.get(0).value()));
    // Check we sent the right number of RPCs.
    assertEquals(2, client.stats().atomicIncrements());
  }
  /** Increment coalescing with large values that overflow. */
  @Test
  public void incrementCoalescingWithOverflowingAmounts() throws Exception {
    client.setFlushInterval(SLOW_FLUSH);
    final byte[] table = TestIntegration.table.getBytes();
    final byte[] key = "cnt".getBytes();
    final byte[] family = TestIntegration.family.getBytes();
    final byte[] qual = { 'q' };
    // Reset the counter, bypassing the edit buffer.
    final DeleteRequest del = new DeleteRequest(table, key, family, qual);
    del.setBufferable(false);
    client.delete(del).join();
    final long big = 1L << 47;
    // First two RPCs can be coalesced.
    bufferIncrement(table, key, family, qual, big);
    bufferIncrement(table, key, family, qual, 1);
    // This one would cause an overflow, so will be sent as a separate RPC.
    // Overflow would happen because the max value is (1L << 48) - 1.
    bufferIncrement(table, key, family, qual, big);
    client.flush().joinUninterruptibly();
    final GetRequest get = new GetRequest(table, key)
      .family(family).qualifier(qual);
    final ArrayList<KeyValue> kvs = client.get(get).join();
    assertSizeIs(1, kvs);
    // The counter value must still include all three increments.
    assertEquals(big + 1 + big, Bytes.getLong(kvs.get(0).value()));
    // Check we sent the right number of RPCs.
    assertEquals(2, client.stats().atomicIncrements());
  }
  /** Increment coalescing with negative values and underflows. */
  @Test
  public void incrementCoalescingWithUnderflowingAmounts() throws Exception {
    client.setFlushInterval(SLOW_FLUSH);
    final byte[] table = TestIntegration.table.getBytes();
    final byte[] key = "cnt".getBytes();
    final byte[] family = TestIntegration.family.getBytes();
    final byte[] qual = { 'q' };
    // Reset the counter, bypassing the edit buffer.
    final DeleteRequest del = new DeleteRequest(table, key, family, qual);
    del.setBufferable(false);
    client.delete(del).join();
    final long big = -1L << 47;
    // First two RPCs can be coalesced.
    bufferIncrement(table, key, family, qual, big);
    bufferIncrement(table, key, family, qual, -1);
    // This one would cause an underflow, so will be sent as a separate RPC.
    // Underflow would happen because the minimum coalescable value is -1L << 48.
    bufferIncrement(table, key, family, qual, big);
    client.flush().joinUninterruptibly();
    final GetRequest get = new GetRequest(table, key)
      .family(family).qualifier(qual);
    final ArrayList<KeyValue> kvs = client.get(get).join();
    assertSizeIs(1, kvs);
    // The counter value must still include all three increments.
    assertEquals(big - 1 + big, Bytes.getLong(kvs.get(0).value()));
    // Check we sent the right number of RPCs.
    assertEquals(2, client.stats().atomicIncrements());
  }
  /** Increment coalescing where the coalesced sum ends up being zero. */
  @Test
  public void incrementCoalescingWithZeroSumAmount() throws Exception {
    client.setFlushInterval(SLOW_FLUSH);
    final byte[] table = TestIntegration.table.getBytes();
    final byte[] key = "cnt".getBytes();
    final byte[] family = TestIntegration.family.getBytes();
    final byte[] qual = { 'q' };
    // Reset the counter, bypassing the edit buffer.
    final DeleteRequest del = new DeleteRequest(table, key, family, qual);
    del.setBufferable(false);
    client.delete(del).join();
    // HBase 0.98 and up do not create a KV on atomic increment when the
    // increment amount is 0. So let's first send an increment of some
    // arbitrary value, and then ensure that this value hasn't changed.
    long n = client.atomicIncrement(new AtomicIncrementRequest(table, key,
                                                               family, qual,
                                                               42)).join();
    assertEquals(42, n);
    // These three coalesce to a net amount of zero.
    bufferIncrement(table, key, family, qual, 1);
    bufferIncrement(table, key, family, qual, 2);
    bufferIncrement(table, key, family, qual, -3);
    client.flush().joinUninterruptibly();
    final GetRequest get = new GetRequest(table, key)
      .family(family).qualifier(qual);
    final ArrayList<KeyValue> kvs = client.get(get).join();
    assertSizeIs(1, kvs);
    assertEquals(42, Bytes.getLong(kvs.get(0).value()));
    // The sum was 0, but must have sent the increment anyway.
    // So in total we should have sent two increments, the initial one,
    // that sets the value to 42, and the one incrementing by zero.
    assertEquals(2, client.stats().atomicIncrements());
  }
/** Helper method to create an atomic increment request. */
private Deferred<Long> bufferIncrement(final byte[] table,
final byte[] key, final byte[] family,
final byte[] qual, final long value) {
return
client.bufferAtomicIncrement(new AtomicIncrementRequest(table, key,
family, qual,
value));
}
/** Helper method to create a get request. */
private static GetRequest mkGet(final byte[] table, final byte[] key,
final byte[] family, final byte[] qual) {
return new GetRequest(table, key).family(family).qualifier(qual);
}
  /** Test regexp-based row key filtering. */
  @Test
  public void keyRegexpFilter() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    final PutRequest put1 = new PutRequest(table, "krf accept:by the filter",
                                           family, "q", "krfv1");
    final PutRequest put2 = new PutRequest(table, "krf filtered out",
                                           family, "q", "krfv2");
    final PutRequest put3 = new PutRequest(table, "krf this is Accepted too",
                                           family, "q", "krfv3");
    Deferred.group(client.put(put1), client.put(put2),
                   client.put(put3)).join();
    final Scanner scanner = client.newScanner(table);
    scanner.setFamily(family);
    scanner.setStartKey("krf ");
    scanner.setStopKey("krf!");
    // Substring match: keeps rows 1 and 3 ("accept"/"Accepted"), drops row 2.
    scanner.setKeyRegexp("[Aa]ccept(ed)?");
    final ArrayList<ArrayList<KeyValue>> rows = scanner.nextRows().join();
    assertSizeIs(2, rows);
    ArrayList<KeyValue> kvs = rows.get(0);
    assertSizeIs(1, kvs);
    assertEq("krfv1", kvs.get(0).value());
    kvs = rows.get(1);
    assertSizeIs(1, kvs);
    assertEq("krfv3", kvs.get(0).value());
  }
  /** Simple column prefix filter tests. */
  @Test
  public void columnPrefixFilter() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    // Keep only rows with a column qualifier that starts with "qa".
    final PutRequest put1 = new PutRequest(table, "cpf1", family, "qa1", "v1");
    final PutRequest put2 = new PutRequest(table, "cpf1", family, "qa2", "v2");
    final PutRequest put3 = new PutRequest(table, "cpf2", family, "qa3", "v3");
    final PutRequest put4 = new PutRequest(table, "cpf2", family, "qb4", "v4");
    Deferred.group(Deferred.group(client.put(put1), client.put(put2)),
                   Deferred.group(client.put(put3), client.put(put4))).join();
    final Scanner scanner = client.newScanner(table);
    scanner.setFamily(family);
    scanner.setStartKey("cpf1");
    scanner.setStopKey("cpf3");
    scanner.setFilter(new ColumnPrefixFilter("qa"));
    final ArrayList<ArrayList<KeyValue>> rows = scanner.nextRows().join();
    assertSizeIs(2, rows);
    // Row "cpf1": both qualifiers match the prefix.
    ArrayList<KeyValue> kvs = rows.get(0);
    assertSizeIs(2, kvs);
    assertEq("v1", kvs.get(0).value());
    assertEq("v2", kvs.get(1).value());
    // Row "cpf2": "qb4" was filtered out, only "qa3" remains.
    kvs = rows.get(1);
    assertSizeIs(1, kvs);
    assertEq("v3", kvs.get(0).value());
  }
  /** Simple column range filter tests. */
  @Test
  public void columnRangeFilter() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    // Keep rows that have a qualifier in between "qb" (inclusive) and "qd4"
    // (exclusive). So only v2 and v3 should be returned by the scanner.
    final PutRequest put1 = new PutRequest(table, "crf1", family, "qa1", "v1");
    final PutRequest put2 = new PutRequest(table, "crf1", family, "qb2", "v2");
    final PutRequest put3 = new PutRequest(table, "crf2", family, "qc3", "v3");
    final PutRequest put4 = new PutRequest(table, "crf2", family, "qd4", "v4");
    Deferred.group(Deferred.group(client.put(put1), client.put(put2)),
                   Deferred.group(client.put(put3), client.put(put4))).join();
    final Scanner scanner = client.newScanner(table);
    scanner.setFamily(family);
    scanner.setStartKey("crf1");
    scanner.setStopKey("crf3");
    scanner.setFilter(new ColumnRangeFilter("qb", true, "qd4", false));
    final ArrayList<ArrayList<KeyValue>> rows = scanner.nextRows().join();
    assertSizeIs(2, rows); // One KV from row "crf1" and one from "crf2".
    ArrayList<KeyValue> kvs = rows.get(0);
    assertSizeIs(1, kvs);
    assertEq("v2", kvs.get(0).value());
    kvs = rows.get(1);
    assertSizeIs(1, kvs);
    assertEq("v3", kvs.get(0).value());
  }
  /** Simple column filter list tests. */
  // A FilterList ANDs its sub-filters: a KV survives only if it passes both.
  @Test
  public void filterList() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    // Keep rows that have both:
    //   - a row key that is exactly either "fl1" or "fl2".
    //   - a qualifier in between "qb" (inclusive) and "qd4" (exclusive).
    final ArrayList<ScanFilter> filters = new ArrayList<ScanFilter>(2);
    filters.add(new ColumnRangeFilter("qb", true, "qd4", false));
    filters.add(new KeyRegexpFilter("fl[12]$"));
    // Filtered out as we're looking due to qualifier being out of range:
    final PutRequest put1 = new PutRequest(table, "fl1", family, "qa1", "v1");
    // Kept by the filter:
    final PutRequest put2 = new PutRequest(table, "fl1", family, "qb2", "v2");
    // Filtered out because the row key doesn't match the regexp:
    final PutRequest put3 = new PutRequest(table, "fl1a", family, "qb3", "v3");
    // Kept by the filter:
    final PutRequest put4 = new PutRequest(table, "fl2", family, "qc4", "v4");
    // Filtered out because the qualifier is on the exclusive upper bound:
    final PutRequest put5 = new PutRequest(table, "fl2", family, "qd5", "v5");
    // Filtered out because the qualifier is past the upper bound:
    final PutRequest put6 = new PutRequest(table, "fl2", family, "qd6", "v6");
    Deferred.group(Deferred.group(client.put(put1), client.put(put2),
                                  client.put(put3)),
                   Deferred.group(client.put(put4), client.put(put5),
                                  client.put(put6))).join();
    final Scanner scanner = client.newScanner(table);
    scanner.setFamily(family);
    scanner.setStartKey("fl0");
    scanner.setStopKey("fl9");
    scanner.setFilter(new FilterList(filters));
    final ArrayList<ArrayList<KeyValue>> rows = scanner.nextRows().join();
    assertSizeIs(2, rows); // One KV from row "fl1" and one from "fl2".
    ArrayList<KeyValue> kvs = rows.get(0);
    assertSizeIs(1, kvs); // KV from "fl1":
    assertEq("v2", kvs.get(0).value());
    kvs = rows.get(1);
    assertSizeIs(1, kvs); // KV from "fl2":
    assertEq("v4", kvs.get(0).value());
  }
  /** Checks that prefetching META populates the client's region cache. */
  @Test
  public void prefetchMeta() throws Exception {
    // Prefetch the metadata for a given table, then invasively probe the
    // region cache to demonstrate it is filled.
    client.prefetchMeta(table).join();
    // getRegion is private to HBaseClient; peek at it via Whitebox.
    Object region_info = Whitebox.invokeMethod(client, "getRegion",
                                               table.getBytes(),
                                               HBaseClient.EMPTY_ARRAY);
    assertNotNull(region_info);
  }
  /** Regression test for issue #2. */
  @Test
  public void regression2() throws Exception {
    try {
      final PutRequest put1 = new PutRequest(table, "k1", family, "q", "val1");
      final PutRequest put2 = new PutRequest(table, "k2", family, "q", "val2");
      LOG.info("Before calling put()");
      // Intentionally not joined: the edits must sit in the buffer.
      client.put(put1);
      client.put(put2);
      LOG.info("After calling put()");
    } finally {
      LOG.info("Before calling flush()");
      // Flushing immediately a cold client used to be troublesome because we
      // wouldn't do a good job at making sure that we can let the client do
      // the entire start-up dance (find ROOT, META, issue pending queries...).
      client.flush().join();
      LOG.info("After calling flush()");
      // Both edits should have gone out together in a single batched RPC.
      assertEquals(1, client.stats().numBatchedRpcSent());
    }
  }
  /** Regression test for issue #25. */
  // Interleaves unjoined deletes and puts across two tables, then flushes;
  // the flush must complete without losing or misrouting any edit.
  @Test
  public void regression25() throws Exception {
    client.setFlushInterval(FAST_FLUSH);
    final String table1 = args[0] + "1";
    final String table2 = args[0] + "2";
    final String family = args[1];
    createOrTruncateTable(client, table1, family);
    createOrTruncateTable(client, table2, family);
    for (int i = 0; i < 2; i++) {
      final PutRequest put;
      final String key = 'k' + String.valueOf(i);
      // Alternate the target table between iterations.
      if (i % 2 == 0) {
        put = new PutRequest(table1, key, family, "q", "v");
      } else {
        put = new PutRequest(table2, key, family, "q", "v");
      }
      final DeleteRequest delete = new DeleteRequest(put.table(), put.key());
      client.delete(delete);
      client.put(put);
    }
    client.flush().joinUninterruptibly();
  }
  /** Regression test for issue #40 (which was actually Netty bug #474). */
  @Test
  public void regression40() throws Exception {
    // Cause a META lookup first to avoid some DEBUG-level spam due to the
    // long key below.
    client.ensureTableFamilyExists(table, family).join();
    client.setFlushInterval(FAST_FLUSH);
    final byte[] table = TestIntegration.table.getBytes();
    // 980 was empirically found to be the minimum size with which
    // Netty bug #474 gets triggered. Bug got fixed in Netty 3.5.8.
    final byte[] key = new byte[980];
    key[0] = 'k';
    key[1] = '4';
    key[2] = '0';
    key[key.length - 1] = '*';
    final byte[] family = TestIntegration.family.getBytes();
    final byte[] qual = { 'q' };
    // An empty value exercises the zero-length payload path.
    final PutRequest put = new PutRequest(table, key, family, qual,
                                          new byte[0] /* empty */);
    final GetRequest get = new GetRequest(table, key);
    client.put(put).join();
    final ArrayList<KeyValue> kvs = client.get(get).join();
    assertSizeIs(1, kvs);
    KeyValue kv = kvs.get(0);
    assertEq("q", kv.qualifier());
    assertEq("", kv.value());
  }
  /** Regression test for issue #41. */
  // A very large number of buffered +1 increments must all be accounted for
  // after a single explicit flush.
  @Test
  public void regression41() throws Exception {
    client.setFlushInterval(SLOW_FLUSH);
    final byte[] table = TestIntegration.table.getBytes();
    final byte[] key = "cnt".getBytes();
    final byte[] family = TestIntegration.family.getBytes();
    final byte[] qual = { 'q' };
    // Reset the counter, bypassing the edit buffer.
    final DeleteRequest del = new DeleteRequest(table, key, family, qual);
    del.setBufferable(false);
    client.delete(del).join();
    final int iterations = 100000;
    for (int i = 0; i < iterations; i++) {
      bufferIncrement(table, key, family, qual, 1);
    }
    client.flush().joinUninterruptibly();
    final GetRequest get = new GetRequest(table, key)
      .family(family).qualifier(qual);
    final ArrayList<KeyValue> kvs = client.get(get).join();
    assertSizeIs(1, kvs);
    assertEquals(iterations, Bytes.getLong(kvs.get(0).value()));
  }
/** Asserts that {@code list} contains exactly {@code size} elements. */
private static <T> void assertSizeIs(final int size,
                                     final Collection<T> list) {
  if (list.size() != size) {
    throw new AssertionError("List was expected to contain " + size
      + " items but was found to contain " + list.size() + ": " + list);
  }
}
/** Asserts that {@code actual} equals the bytes of {@code expect}. */
private static void assertEq(final String expect, final byte[] actual) {
  final byte[] expectedBytes = expect.getBytes();
  assertArrayEquals(expectedBytes, actual);
}
/** Creates {@code table} with {@code family} (2 versions kept) via the shell. */
private static void createTable(final String table,
                                final String family) throws Exception {
  LOG.info("Creating table " + table + " with family " + family);
  final String ddl =
    "create '" + table + "', {NAME => '" + family + "', VERSIONS => 2}";
  hbaseShell(ddl);
}
private static void truncateTable(final String table) throws Exception {
if (!TRUNCATE) {
return;
}
LOG.warn("Truncating table " + table + "...");
for (int i = 3; i >= 0; i
LOG.warn(i + " Press Ctrl-C if you care about " + table);
Thread.sleep(1000);
}
hbaseShell("truncate '" + table + '\'');
}
/**
 * Runs the given command through the interactive HBase shell.
 *
 * @throws RuntimeException if the shell exits with a non-zero status.
 */
private static void hbaseShell(final String command) throws Exception {
  final ProcessBuilder pb = new ProcessBuilder();
  pb.command(HBASE_HOME + "/bin/hbase", "shell");
  pb.environment().remove("HBASE_HOME");
  LOG.info("Running HBase shell command: " + command);
  final Process shell = pb.start();
  try {
    final OutputStream stdin = shell.getOutputStream();
    stdin.write(command.getBytes());
    stdin.write('\n');
    // The JDK doesn't guarantee that close() implies flush(), so flush
    // explicitly before closing to be safe.
    stdin.flush();
    stdin.close();
    // NOTE(review): stdout is fully drained before stderr.  If the shell
    // writes more than the OS pipe buffer (~4KB) to stderr first, both
    // sides block and this deadlocks.
    printLines("stdout", shell.getInputStream());
    printLines("stderr", shell.getErrorStream());
    final int rv = shell.waitFor();
    if (rv != 0) {
      throw new RuntimeException("hbase shell returned " + rv);
    }
  } finally {
    shell.destroy();  // Always reap the subprocess, even on failure.
  }
}
/**
 * Copies every line read from {@code in} to the log, each line tagged
 * with {@code what} (e.g. "stdout" / "stderr").
 */
private static void printLines(final String what, final InputStream in)
  throws Exception {
  final BufferedReader r = new BufferedReader(new InputStreamReader(in));
  try {
    String line;
    while ((line = r.readLine()) != null) {
      LOG.info('(' + what + ") " + line);
    }
  } finally {
    // Fix: the reader (and the underlying process stream) was never
    // closed; release it even if readLine() throws.
    r.close();
  }
}
/**
 * JUnit run listener that logs each test lifecycle event (start, finish,
 * failure, ignore) so progress is visible in the integration-test output.
 */
private static final class JunitListener extends RunListener {
  @Override
  public void testStarted(final Description description) {
    LOG.info("Running test " + description.getMethodName());
  }
  @Override
  public void testFinished(final Description description) {
    LOG.info("Done running test " + description.getMethodName());
  }
  @Override
  public void testFailure(final Failure failure) {
    // Include the throwable so the stack trace lands in the log.
    LOG.error("Test failed: " + failure.getDescription().getMethodName(),
      failure.getException());
  }
  @Override
  public void testIgnored(final Description description) {
    LOG.info("Test ignored: " + description.getMethodName());
  }
}
}
|
package org.commcare.tasks;
import android.content.Context;
import android.os.AsyncTask;
import org.commcare.CommCareApplication;
import org.commcare.logging.AndroidLogger;
import org.commcare.models.FormRecordProcessor;
import org.commcare.android.database.user.models.FormRecord;
import org.commcare.suite.model.Profile;
import org.commcare.tasks.templates.CommCareTask;
import org.commcare.utils.FormUploadUtil;
import org.commcare.utils.SessionUnavailableException;
import org.commcare.views.notifications.NotificationMessageFactory;
import org.commcare.views.notifications.ProcessIssues;
import org.javarosa.core.model.User;
import org.javarosa.core.services.Logger;
import org.javarosa.xml.util.InvalidStorageStructureException;
import org.javarosa.xml.util.InvalidStructureException;
import org.javarosa.xml.util.UnfullfilledRequirementsException;
import org.xmlpull.v1.XmlPullParserException;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.PrintStream;
import java.util.LinkedList;
import java.util.Queue;
import javax.crypto.spec.SecretKeySpec;
/**
 * Task that processes completed {@link FormRecord}s and then submits them to
 * the server.  Tasks are serialized through a static FIFO queue so that only
 * one submission runs at a time.
 *
 * @author ctsims
 */
public abstract class ProcessAndSendTask<R> extends CommCareTask<FormRecord, Long, Integer, R> implements DataSubmissionListener {
    private Context c;
    private String url;
    // Per-record outcome codes, parallel to the records array handed to
    // doTaskBackground; initialized to FormUploadUtil.FAILURE.
    private Long[] results;
    private final int sendTaskId;

    // Task/phase identifiers (-1 when not running in sync mode).
    public static final int PROCESSING_PHASE_ID = 8;
    public static final int SEND_PHASE_ID = 9;

    // Progress codes published while the task runs (power-of-two values).
    public static final long PROGRESS_ALL_PROCESSED = 8;
    public static final long SUBMISSION_BEGIN = 16;
    public static final long SUBMISSION_START = 32;
    public static final long SUBMISSION_NOTIFY = 64;
    public static final long SUBMISSION_DONE = 128;
    public static final long PROGRESS_LOGGED_OUT = 256;
    public static final long PROGRESS_SDCARD_REMOVED = 512;

    private DataSubmissionListener formSubmissionListener;
    private final FormRecordProcessor processor;

    // NOTE(review): not referenced in the code visible here — presumably the
    // retry count used by the send phase; confirm before removing.
    private static final int SUBMISSION_ATTEMPTS = 2;

    // Global FIFO of in-flight tasks; all access synchronizes on the queue.
    private static final Queue<ProcessAndSendTask> processTasks = new LinkedList<>();
    /**
     * Creates a task in sync mode (blocks the user with a sync dialog).
     *
     * @param c   context used for record processing and notifications
     * @param url submission endpoint URL
     */
    public ProcessAndSendTask(Context c, String url) {
        this(c, url, true);
    }
/**
* @param inSyncMode blocks the user with a sync dialog
*/
public ProcessAndSendTask(Context c, String url, boolean inSyncMode) {
this.c = c;
this.url = url;
this.processor = new FormRecordProcessor(c);
if (inSyncMode) {
this.sendTaskId = SEND_PHASE_ID;
this.taskId = PROCESSING_PHASE_ID;
} else {
this.sendTaskId = -1;
this.taskId = -1;
}
}
    /**
     * Processes every record that still needs processing, waits for its turn
     * in the global task queue, then submits all records.
     *
     * @return the worst (numerically largest) per-record outcome code, or one
     *         of PROGRESS_SDCARD_REMOVED / PROGRESS_LOGGED_OUT /
     *         FormUploadUtil.FAILURE on early exit
     */
    @Override
    protected Integer doTaskBackground(FormRecord... records) {
        boolean needToSendLogs = false;
        try {
            results = new Long[records.length];
            for (int i = 0; i < records.length; ++i) {
                //Assume failure
                results[i] = FormUploadUtil.FAILURE;
            }
            //The first thing we need to do is make sure everything is processed,
            //we can't actually proceed before that.
            try {
                needToSendLogs = checkFormRecordStatus(records);
            } catch (FileNotFoundException e) {
                // Storage (SD card) disappeared mid-processing.
                return (int)PROGRESS_SDCARD_REMOVED;
            } catch (TaskCancelledException e) {
                return (int)FormUploadUtil.FAILURE;
            }

            this.publishProgress(PROGRESS_ALL_PROCESSED);

            //Put us on the queue!
            synchronized (processTasks) {
                processTasks.add(this);
            }

            boolean needToRefresh;
            try {
                needToRefresh = blockUntilTopOfQueue();
            } catch (TaskCancelledException e) {
                return (int)FormUploadUtil.FAILURE;
            }

            if (needToRefresh) {
                //There was another activity before this one. Refresh our models in case
                //they were updated
                for (int i = 0; i < records.length; ++i) {
                    int dbId = records[i].getID();
                    records[i] = processor.getRecord(dbId);
                }
            }

            //Ok, all forms are now processed. Time to focus on sending
            if (formSubmissionListener != null) {
                formSubmissionListener.beginSubmissionProcess(records.length);
            }

            sendForms(records);

            // The overall result is the worst individual outcome.
            long result = 0;
            for (int i = 0; i < records.length; ++i) {
                if (results[i] > result) {
                    result = results[i];
                }
            }
            return (int)result;
        } catch (SessionUnavailableException sue) {
            this.cancel(false);
            return (int)PROGRESS_LOGGED_OUT;
        } finally {
            // Always signal completion and dequeue ourselves, including on
            // every early return above, so waiting tasks are not starved.
            this.endSubmissionProcess();
            synchronized (processTasks) {
                processTasks.remove(this);
            }
            if (needToSendLogs) {
                CommCareApplication._().notifyLogsPending();
            }
        }
    }
    /**
     * Ensures every record is processed, replacing each COMPLETE-but-
     * unprocessed record in {@code records} with its processed form in place.
     * Records with unrecoverable data are wiped and flagged for log upload.
     *
     * @return true if any record was wiped and logs should be sent
     * @throws FileNotFoundException if a record file is missing because
     *         storage (the SD card) was removed
     * @throws TaskCancelledException if the task was cancelled mid-loop
     */
    private boolean checkFormRecordStatus(FormRecord[] records)
            throws FileNotFoundException, TaskCancelledException {
        boolean needToSendLogs = false;
        processor.beginBulkSubmit();
        for (int i = 0; i < records.length; ++i) {
            if (isCancelled()) {
                throw new TaskCancelledException();
            }
            FormRecord record = records[i];

            //If the form is complete, but unprocessed, process it.
            if (FormRecord.STATUS_COMPLETE.equals(record.getStatus())) {
                try {
                    records[i] = processor.process(record);
                } catch (InvalidStructureException e) {
                    // Unrecoverable transaction data: wipe and flag logs.
                    CommCareApplication._().reportNotificationMessage(NotificationMessageFactory.message(ProcessIssues.BadTransactions), true);
                    Logger.log(AndroidLogger.TYPE_ERROR_DESIGN, "Removing form record due to transaction data|" + getExceptionText(e));
                    FormRecordCleanupTask.wipeRecord(c, record);
                    needToSendLogs = true;
                } catch (XmlPullParserException e) {
                    // Malformed XML: same recovery as bad transaction data.
                    CommCareApplication._().reportNotificationMessage(NotificationMessageFactory.message(ProcessIssues.BadTransactions), true);
                    Logger.log(AndroidLogger.TYPE_ERROR_DESIGN, "Removing form record due to bad xml|" + getExceptionText(e));
                    FormRecordCleanupTask.wipeRecord(c, record);
                    needToSendLogs = true;
                } catch (UnfullfilledRequirementsException e) {
                    CommCareApplication._().reportNotificationMessage(NotificationMessageFactory.message(ProcessIssues.BadTransactions), true);
                    Logger.log(AndroidLogger.TYPE_ERROR_DESIGN, "Removing form record due to bad requirements|" + getExceptionText(e));
                    FormRecordCleanupTask.wipeRecord(c, record);
                    needToSendLogs = true;
                } catch (FileNotFoundException e) {
                    if (CommCareApplication._().isStorageAvailable()) {
                        //If storage is available generally, this is a bug in the app design
                        Logger.log(AndroidLogger.TYPE_ERROR_DESIGN, "Removing form record because file was missing|" + getExceptionText(e));
                        FormRecordCleanupTask.wipeRecord(c, record);
                    } else {
                        CommCareApplication._().reportNotificationMessage(NotificationMessageFactory.message(ProcessIssues.StorageRemoved), true);
                        //Otherwise, the SD card just got removed, and we need to bail anyway.
                        throw e;
                    }
                } catch (InvalidStorageStructureException e) {
                    // Thrown when updating a case that isn't present
                    // Do same behavior as InvalidStructureException (wipe record)
                    CommCareApplication._().reportNotificationMessage(NotificationMessageFactory.message(ProcessIssues.BadTransactions), true);
                    Logger.log(AndroidLogger.TYPE_ERROR_DESIGN, "Removing form record due to case transaction data|" + getExceptionText(e));
                    FormRecordCleanupTask.wipeRecord(c, record);
                    needToSendLogs = true;
                } catch (IOException e) {
                    // Possibly transient I/O trouble: keep the record for retry.
                    Logger.log(AndroidLogger.TYPE_ERROR_WORKFLOW, "IO Issues processing a form. Tentatively not removing in case they are resolvable|" + getExceptionText(e));
                }
            }
        }
        processor.closeBulkSubmit();
        return needToSendLogs;
    }
private boolean blockUntilTopOfQueue() throws TaskCancelledException {
boolean needToRefresh = false;
while (true) {
//See if it's our turn to go
synchronized (processTasks) {
if (isCancelled()) {
processTasks.remove(this);
throw new TaskCancelledException();
}
//Are we at the head of the queue?
ProcessAndSendTask head = processTasks.peek();
if (head == this) {
break;
}
//Otherwise, is the head of the queue busted?
|
package stroom.app;
import stroom.app.guice.BootStrapModule;
import stroom.cluster.lock.impl.db.ClusterLockConfig;
import stroom.cluster.lock.impl.db.ClusterLockConfig.ClusterLockDbConfig;
import stroom.config.app.AppConfig;
import stroom.config.app.Config;
import stroom.config.common.AbstractDbConfig;
import stroom.config.common.CommonDbConfig;
import stroom.config.common.ConnectionConfig;
import stroom.db.util.DataSourceProxy;
import stroom.db.util.DbUtil;
import stroom.db.util.JooqUtil;
import stroom.util.BuildInfoProvider;
import stroom.util.NullSafe;
import stroom.util.date.DateUtil;
import stroom.util.db.DbMigrationState;
import stroom.util.guice.GuiceUtil;
import stroom.util.logging.LambdaLogger;
import stroom.util.logging.LambdaLoggerFactory;
import stroom.util.logging.LogUtil;
import stroom.util.shared.BuildInfo;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import io.dropwizard.setup.Environment;
import org.jetbrains.annotations.NotNull;
import org.jooq.Configuration;
import org.jooq.DSLContext;
import org.jooq.impl.DSL;
import java.nio.file.Path;
import java.sql.Connection;
import java.sql.SQLException;
import java.time.Duration;
import java.time.Instant;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Supplier;
import java.util.stream.Collectors;
import javax.sql.DataSource;
public class BootstrapUtil {

    private static final LambdaLogger LOGGER = LambdaLoggerFactory.getLogger(BootstrapUtil.class);

    // Single-row table used as a cluster-wide bootstrap lock and as a record
    // of the build version the database was last migrated to.
    private static final String BUILD_VERSION_TABLE_NAME = "build_version";
    private static final int BUILD_VERSION_TABLE_ID = 1;
    // Version stored when the row is first created, before any boot completes.
    private static final String INITIAL_VERSION = "UNKNOWN_VERSION";
    private static final String SNAPSHOT_VERSION = "SNAPSHOT";

    // Static utility class, not to be instantiated.
    private BootstrapUtil() {
    }

    /**
     * Creates an injector with the bare minimum to initialise the DB connections and configuration.
     * The initialisation of the DB datasources will trigger the FlyWay DB migration to run.
     * You should call {@link Injector#createChildInjector} to build a fully formed injector from it.
     */
    public static Injector bootstrapApplication(final Config configuration,
                                                final Environment environment,
                                                final Path configFile) {
        return bootstrapApplication(
                configuration,
                () -> new BootStrapModule(configuration, environment, configFile));
    }

    /**
     * Creates an injector with the bare minimum to initialise the DB connections and configuration.
     * The initialisation of the DB datasources will trigger the FlyWay DB migration to run.
     * You should call {@link Injector#createChildInjector} to build a fully formed injector from it.
     */
    public static Injector bootstrapApplication(final Config configuration,
                                                final Path configFile) {
        return bootstrapApplication(
                configuration,
                () -> new BootStrapModule(configuration, configFile));
    }

    /**
     * Common implementation: determines the build version, then creates the
     * bootstrap injector under the cluster-wide bootstrap lock (so DB
     * migrations run on exactly one node).
     */
    private static Injector bootstrapApplication(
            final Config configuration,
            final Supplier<BootStrapModule> bootStrapModuleSupplier) {

        Objects.requireNonNull(configuration);
        Objects.requireNonNull(bootStrapModuleSupplier);

        // Create a minimalist injector with just the BuildInfo so we can determine
        // what version the system is at and what we need to do before creating
        // the bootstrap injector
        final BuildInfo buildInfo = BuildInfoProvider.getBuildInfo();
        showBuildInfo(buildInfo);

        // In dev the build ver will always be SNAPSHOT so append the now time to make
        // it different to force the migrations to always run in dev.
        String buildVersion = Objects.requireNonNullElse(buildInfo.getBuildVersion(), SNAPSHOT_VERSION);
        buildVersion = SNAPSHOT_VERSION.equals(buildVersion)
                ? SNAPSHOT_VERSION + "_" + DateUtil.createNormalDateTimeString()
                : buildVersion;
        LOGGER.debug("buildVersion: '{}'", buildVersion);

        return BootstrapUtil.doWithBootstrapLock(
                configuration,
                buildVersion, () -> {
                    LOGGER.info("Initialising database connections and configuration properties");

                    final BootStrapModule bootstrapModule = bootStrapModuleSupplier.get();
                    final Injector injector = Guice.createInjector(bootstrapModule);

                    // Force all data sources to be created so we can force migrations to run.
                    final Set<DataSource> dataSources = injector.getInstance(
                            Key.get(GuiceUtil.setOf(DataSource.class)));

                    LOGGER.debug(() -> LogUtil.message("Used {} data sources:\n{}",
                            dataSources.size(),
                            dataSources.stream()
                                    .map(dataSource -> {
                                        final String prefix = dataSource instanceof DataSourceProxy
                                                ? ((DataSourceProxy) dataSource).getName() + " - "
                                                : "";
                                        return prefix + dataSource.getClass().getName();
                                    })
                                    .map(name -> " " + name)
                                    .sorted()
                                    .collect(Collectors.joining("\n"))));
                    return injector;
                });
    }

    /** Logs the build version and date banner at startup. */
    private static void showBuildInfo(final BuildInfo buildInfo) {
        Objects.requireNonNull(buildInfo);
        LOGGER.info(""
                + "\n********************************************************************************"
                + "\n Build version: " + buildInfo.getBuildVersion()
                + "\n Build date: " + buildInfo.getBuildDate()
                + "\n********************************************************************************");
    }

    /**
     * Use a table lock on a noddy table created just for this purpose to enforce
     * a cluster wide lock so we can do all the flyway migrations in isolation.
     * Flyway migrations should work under lock but we have lots of independent flyway
     * migrations so it is safer to do it this way.
     * Can't use the cluster lock service as that is not a thing at this point.
     */
    static <T> T doWithBootstrapLock(final Config config,
                                     final String buildVersion,
                                     final Supplier<T> work) {

        Objects.requireNonNull(work);
        // We need to use one of the
        final ConnectionConfig connectionConfig = getClusterLockConnectionConfig(config);

        DbUtil.waitForConnection(connectionConfig);

        final AtomicBoolean doneWork = new AtomicBoolean(false);

        T workOutput;
        try (Connection conn = DbUtil.getSingleConnection(connectionConfig)) {
            // Need read committed so that once we have acquired the lock we can see changes
            // committed by other nodes.
            conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);
            final DSLContext context = JooqUtil.createContext(conn);

            workOutput = context.transactionResult(txnConfig -> {
                // make sure we have a populated build_version table to check and lock on
                ensureBuildVersionTable(txnConfig, conn);

                String dbBuildVersion = getBuildVersionFromDb(txnConfig);
                boolean isDbBuildVersionUpToDate = dbBuildVersion.equals(buildVersion);
                T output = null;
                if (isDbBuildVersionUpToDate) {
                    // Fast path: another boot already migrated to this version.
                    LOGGER.info("Found required build version '{}' in {} table, no lock or DB migration required.",
                            dbBuildVersion,
                            BUILD_VERSION_TABLE_NAME);
                } else {
                    LOGGER.info("Found old build version '{}' in {} table. Bootstrap lock and DB migration required.",
                            dbBuildVersion, BUILD_VERSION_TABLE_NAME);

                    acquireBootstrapLock(connectionConfig, txnConfig);

                    // We now hold a cluster wide lock
                    final Instant startTime = Instant.now();

                    // Re-check the lock table state now we are under lock
                    // For the first node these should be the same as when checked above
                    dbBuildVersion = getBuildVersionFromDb(txnConfig);
                    isDbBuildVersionUpToDate = dbBuildVersion.equals(buildVersion);

                    if (isDbBuildVersionUpToDate) {
                        // Another node has done the bootstrap so we can just drop out of the txn/connection to
                        // free up the lock
                        LOGGER.info("Found required build version '{}' in {} table, releasing lock. " +
                                        "No DB migration required.",
                                buildVersion, BUILD_VERSION_TABLE_NAME);
                    } else {
                        LOGGER.info("Upgrading stroom from '{}' to '{}' under lock", dbBuildVersion, buildVersion);
                        // We hold the lock and the db version is out of date so perform work under lock
                        // including doing all the flyway migrations
                        output = work.get();
                        doneWork.set(true);

                        // If anything fails and we don't update/insert these it is fine, it will just
                        // get done on next successful boot
                        updateDbBuildVersion(buildVersion, txnConfig);
                    }
                    // We are the first node to get the lock for this build version so now release the lock
                    LOGGER.info(LogUtil.message("Releasing bootstrap lock after {}",
                            Duration.between(startTime, Instant.now())));
                }
                // Set local state so the db modules know not to run flyway when work.get() is called
                // below
                DbMigrationState.markBootstrapMigrationsComplete();

                return output;
            });
            LOGGER.debug("Closed connection");
        } catch (SQLException e) {
            throw new RuntimeException("Error obtaining bootstrap lock: " + e.getMessage(), e);
        }

        // If we didn't do it under lock then we need to do it now
        if (!doneWork.get()) {
            workOutput = work.get();
        }

        return workOutput;
    }

    /** Records {@code buildVersion} in the single build_version row. */
    private static void updateDbBuildVersion(final String buildVersion,
                                             final Configuration txnConfig) {
        LOGGER.info("Updating {} table with current build version: {}",
                BUILD_VERSION_TABLE_NAME, buildVersion);
        DSL.using(txnConfig)
                .execute(LogUtil.message("""
                                UPDATE {}
                                SET build_version = ?
                                WHERE id = ?
                                """, BUILD_VERSION_TABLE_NAME),
                        buildVersion, BUILD_VERSION_TABLE_ID);
    }

    /**
     * Reads the stored build version.  Assumes the row exists (see
     * {@link #ensureBuildVersionTable}); fetchOne would NPE otherwise.
     */
    @NotNull
    private static String getBuildVersionFromDb(final Configuration txnConfig) {
        return DSL.using(txnConfig)
                .fetchOne(LogUtil.message("""
                                SELECT build_version
                                FROM {}
                                WHERE id = ?""", BUILD_VERSION_TABLE_NAME),
                        BUILD_VERSION_TABLE_ID)
                .get(0, String.class);
    }

    /**
     * Blocks until this transaction holds the row lock on the single
     * build_version row (SELECT ... FOR UPDATE), i.e. the cluster-wide
     * bootstrap lock.  Released when the enclosing transaction ends.
     */
    private static void acquireBootstrapLock(final ConnectionConfig connectionConfig,
                                             final Configuration txnConfig) {
        LOGGER.info("Waiting to acquire bootstrap lock on table: {}, user: {}, url: {}",
                BUILD_VERSION_TABLE_NAME, connectionConfig.getUser(), connectionConfig.getUrl());
        Instant startTime = Instant.now();

        final String sql = LogUtil.message("""
                SELECT *
                FROM {}
                WHERE id = ?
                FOR UPDATE""", BUILD_VERSION_TABLE_NAME);
        DSL.using(txnConfig)
                .execute(sql, BUILD_VERSION_TABLE_ID);

        LOGGER.info("Waited {} to acquire bootstrap lock",
                Duration.between(startTime, Instant.now()));
    }

    /**
     * Creates the build_version table (if absent) and its single row (if
     * absent), committing the insert so other nodes can see it.
     */
    private static void ensureBuildVersionTable(final Configuration txnConfig, final Connection connection) {
        try {
            // Need read committed so that once we have acquired the lock we can see changes
            // committed by other nodes, e.g. we want to see if another node has inserted the
            // new record.

            // Create a table that we can use to get a table lock on
            final String createTableSql = LogUtil.message("""
                            CREATE TABLE IF NOT EXISTS {} (
                                id INT NOT NULL,
                                build_version VARCHAR(255) NOT NULL,
                                PRIMARY KEY (id)
                            ) ENGINE=InnoDB DEFAULT CHARACTER SET utf8mb4 COLLATE utf8mb4_0900_ai_ci""",
                    BUILD_VERSION_TABLE_NAME);

            LOGGER.debug("Ensuring table {} exists", BUILD_VERSION_TABLE_NAME);

            // Causes implicit commit
            DSL.using(txnConfig)
                    .execute(createTableSql);

            // Do a select first to avoid being blocked by the row lock with the insert stmt below
            final boolean isRecordPresent = DSL.using(txnConfig)
                    .fetchOptional(LogUtil.message("""
                                    SELECT build_version
                                    FROM {}
                                    WHERE id = ?""", BUILD_VERSION_TABLE_NAME),
                            BUILD_VERSION_TABLE_ID)
                    .isPresent();

            if (!isRecordPresent) {
                // Ensure we have a record that we can get a lock on
                // Done like this in case another node gets in before us.
                // This may block if another node beat us to it and locked the row but
                // that is fine.
                final String insertSql = LogUtil.message("""
                                INSERT INTO {} (
                                    id,
                                    build_version)
                                SELECT ?, ?
                                FROM DUAL
                                WHERE NOT EXISTS (
                                    SELECT NULL
                                    FROM {}
                                    WHERE ID = ?)
                                LIMIT 1""",
                        BUILD_VERSION_TABLE_NAME,
                        BUILD_VERSION_TABLE_NAME);
                final int result = DSL.using(txnConfig)
                        .execute(insertSql,
                                BUILD_VERSION_TABLE_ID,
                                INITIAL_VERSION,
                                BUILD_VERSION_TABLE_ID);

                // Make sure other nodes can see it
                if (result > 0) {
                    LOGGER.info("Committing new {} row", BUILD_VERSION_TABLE_NAME);
                    connection.commit();
                }
            }
        } catch (Exception e) {
            throw new RuntimeException("Error ensuring table "
                    + BUILD_VERSION_TABLE_NAME + ": "
                    + e.getMessage(), e);
        }
    }

    /**
     * Builds the connection config for the cluster-lock DB by merging the
     * YAML common DB config with the cluster-lock-specific DB config.
     */
    private static ConnectionConfig getClusterLockConnectionConfig(final Config config) {
        final CommonDbConfig yamlCommonDbConfig = Objects.requireNonNullElse(
                config.getYamlAppConfig().getCommonDbConfig(),
                new CommonDbConfig());
        final ClusterLockDbConfig yamlClusterLockDbConfig = NullSafe.getAsOptional(
                config.getYamlAppConfig(),
                AppConfig::getClusterLockConfig,
                ClusterLockConfig::getDbConfig)
                .orElse(new ClusterLockDbConfig());

        final AbstractDbConfig mergedClusterLockDbConfig = yamlCommonDbConfig.mergeConfig(yamlClusterLockDbConfig);
        final ConnectionConfig connectionConfig = mergedClusterLockDbConfig.getConnectionConfig();

        LOGGER.debug(() -> LogUtil.message("Using connection user: {}, url: {}, class: {}",
                connectionConfig.getUser(),
                connectionConfig.getUrl(),
                connectionConfig.getClassName()));

        DbUtil.validate(connectionConfig);

        return connectionConfig;
    }
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package filedemultiplexer;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
/**
 * Splits a file bit-wise across several output files: bit {@code j} (LSB
 * first) of every input byte is appended to output file {@code j}.  At most
 * 8 output files are supported since each input byte carries 8 bits.
 *
 * @author Alp Sayin
 */
public class FileDemultiplexer
{
    public static final int READ_BUFFER_SIZE = 4*1024*1024;
    public static final int WRITE_BUFFER_SIZE = READ_BUFFER_SIZE/8;

    private String inputFilename;
    private String outputFilenamePattern;
    private String outputFilenameExtension;
    private String outputFileLocation;
    private int demuxCount;
    private int read_buffer_size = READ_BUFFER_SIZE;
    private int write_buffer_size = WRITE_BUFFER_SIZE;

    /**
     * Creates a demultiplexer writing to the current directory with
     * default buffer sizes.
     */
    public FileDemultiplexer(String inputFilename, String outputFilenamePattern, String outputFilenameExtension, int demuxCount)
    {
        this.inputFilename = inputFilename;
        this.outputFilenamePattern = outputFilenamePattern;
        this.outputFilenameExtension = outputFilenameExtension;
        this.outputFileLocation = ".";
        this.demuxCount = demuxCount;
    }

    /**
     * @param read_buffer_size  read buffer size in KiB (multiplied by 1024)
     * @param write_buffer_size write buffer size in KiB (multiplied by 1024)
     */
    public FileDemultiplexer(String inputFilename, String outputFilenamePattern, String outputFilenameExtension, String outputFileLocation, int demuxCount, int read_buffer_size, int write_buffer_size)
    {
        this.inputFilename = inputFilename;
        this.outputFilenamePattern = outputFilenamePattern;
        this.outputFilenameExtension = outputFilenameExtension;
        this.outputFileLocation = outputFileLocation;
        this.demuxCount = demuxCount;
        this.read_buffer_size = read_buffer_size*1024;
        this.write_buffer_size = write_buffer_size*1024;
    }

    /**
     * Reads the input file and distributes bit {@code j} of every byte to
     * output file {@code j} ("&lt;location&gt;/&lt;pattern&gt;j.&lt;ext&gt;").
     * Note: each output file only receives whole bytes; if the input length
     * is not a multiple of 8 bytes, trailing partial bytes are dropped
     * (see {@link OutputFile#writeBit}).
     */
    public void demultiplex() throws Exception
    {
        File inputFile = new File(getInputFilename());
        OutputFile[] outputFiles = new OutputFile[getDemuxCount()];
        BufferedInputStream bis = null;
        try
        {
            for(int i=0; i<getDemuxCount(); i++)
            {
                outputFiles[i] = new OutputFile(this.getOutputFileLocation()+File.separator+getOutputFilenamePattern()+i+"."+getOutputFilenameExtension());
            }
            bis = new BufferedInputStream(new FileInputStream(inputFile), this.getRead_buffer_size());
            byte[] nextBytes = new byte[4*1024*1024];
            int bytesRead;
            while((bytesRead = bis.read(nextBytes)) != -1)
            {
                for(int i=0; i<bytesRead; i++)
                {
                    int[] bits = get8Bits(nextBytes[i]);
                    for(int j=0; j<getDemuxCount(); j++)
                    {
                        outputFiles[j].writeBit(bits[j]);
                    }
                }
            }
            System.out.println("End of input file");
        }
        finally
        {
            // Fix: the original leaked the input stream and every output
            // stream whenever reading or writing threw mid-way.
            if(bis != null)
            {
                try { bis.close(); } catch(IOException ignored) { }
            }
            for(int i=0; i<getDemuxCount(); i++)
            {
                if(outputFiles[i] != null)
                {
                    try { outputFiles[i].close(); } catch(IOException ignored) { }
                }
            }
        }
    }

    /**
     * @return the inputFilename
     */
    public String getInputFilename()
    {
        return inputFilename;
    }

    /**
     * @param inputFilename the inputFilename to set
     */
    public void setInputFilename(String inputFilename)
    {
        this.inputFilename = inputFilename;
    }

    /**
     * @return the outputFilenamePattern
     */
    public String getOutputFilenamePattern()
    {
        return outputFilenamePattern;
    }

    /**
     * @param outputFilenamePattern the outputFilenamePattern to set
     */
    public void setOutputFilenamePattern(String outputFilenamePattern)
    {
        this.outputFilenamePattern = outputFilenamePattern;
    }

    /**
     * @return the outputFilenameExtension
     */
    public String getOutputFilenameExtension()
    {
        return outputFilenameExtension;
    }

    /**
     * @param outputFilenameExtension the outputFilenameExtension to set
     */
    public void setOutputFilenameExtension(String outputFilenameExtension)
    {
        this.outputFilenameExtension = outputFilenameExtension;
    }

    /**
     * @return the demuxCount
     */
    public int getDemuxCount()
    {
        return demuxCount;
    }

    /**
     * @param demuxCount the demuxCount to set
     */
    public void setDemuxCount(int demuxCount)
    {
        this.demuxCount = demuxCount;
    }

    /**
     * @return the read_buffer_size
     */
    public int getRead_buffer_size()
    {
        return read_buffer_size;
    }

    /**
     * @param read_buffer_size the read_buffer_size to set
     */
    public void setRead_buffer_size(int read_buffer_size)
    {
        this.read_buffer_size = read_buffer_size;
    }

    /**
     * @return the write_buffer_size
     */
    public int getWrite_buffer_size()
    {
        return write_buffer_size;
    }

    /**
     * @param write_buffer_size the write_buffer_size to set
     */
    public void setWrite_buffer_size(int write_buffer_size)
    {
        this.write_buffer_size = write_buffer_size;
    }

    /**
     * @return the outputFileLocation
     */
    public String getOutputFileLocation()
    {
        return outputFileLocation;
    }

    /**
     * @param outputFileLocation the outputFileLocation to set
     */
    public void setOutputFileLocation(String outputFileLocation)
    {
        this.outputFileLocation = outputFileLocation;
    }

    /**
     * Buffered bit-level writer: bits accumulate into a byte (first bit in
     * ends up in the most significant position) and the byte is emitted once
     * 8 bits have arrived.  Bits left over at close() are discarded.
     */
    private class OutputFile
    {
        private File file;
        private FileOutputStream fos;
        private BufferedOutputStream bos;
        private int byteBuf;       // bits accumulated so far
        private int bitCounter;    // number of bits currently in byteBuf (0-7)

        public OutputFile(String filename) throws FileNotFoundException, IOException
        {
            this.file = new File(filename);
            if(!this.file.exists())
                this.file.createNewFile();
            this.fos = new FileOutputStream(file);
            this.bos = new BufferedOutputStream(fos, getWrite_buffer_size());
            this.byteBuf = 0;
            this.bitCounter = 0;
        }

        /** Appends one bit; flushes a full byte every 8th call. */
        public void writeBit(int bit) throws IOException
        {
            byteBuf = ((byteBuf << 1) & 0xFF);
            byteBuf |= bit;
            bitCounter = (bitCounter+1) % 8;
            if(bitCounter == 0)
            {
                this.bos.write(byteBuf);
            }
        }

        /** Appends one whole byte, bypassing the bit accumulator. */
        public void writeByte(int byt) throws IOException
        {
            this.bos.write(byt);
        }

        public void close() throws IOException
        {
            this.bos.close();
        }
    }

    /** Returns bit {@code position} (0 = LSB) of {@code data}. */
    private static int getBit(int data, int position)
    {
        return (data & (1 << position)) >> (position);
    }

    /** Returns the 8 bits of {@code data}, LSB first. */
    private static int[] get8Bits(int data)
    {
        int[] retVal = new int[8];
        for(int i=0; i<8; i++)
        {
            retVal[i] = (data >> i) & 0x1;
        }
        return retVal;
    }

    /**
     * Expands {@code data} into one int per bit, LSB first within each byte.
     *
     * @throws Exception if numOfBits is not a multiple of 8 or doesn't
     *         match the data length
     */
    private static int[] getBits(byte[] data, int numOfBits) throws Exception
    {
        if(numOfBits % 8 != 0)
            throw new Exception("GetBits can only work for numOfBits which are multiple of 8");
        if(numOfBits/8 != data.length)
            throw new Exception("Too much or too less data input to getBits");
        int[] retVal = new int[numOfBits];
        for(int i=0; i<numOfBits; i++)
        {
            // Fix: the original tested "i+1 % 8 == 0" which, due to operator
            // precedence, is "i + (1%8)" and never zero, so the byte index
            // never advanced; it also shifted by i instead of i%8, reading
            // garbage past bit 7.
            retVal[i] = (data[i/8] >> (i % 8)) & 0x1;
        }
        return retVal;
    }
}
|
package nl.mpi.kinnate.export;
import java.awt.Component;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import nl.mpi.arbil.userstorage.SessionStorage;
import nl.mpi.arbil.util.BugCatcherManager;
import nl.mpi.arbil.util.MessageDialogHandler;
import nl.mpi.kinnate.SavePanel;
import nl.mpi.kinnate.kindata.EntityData;
/**
 * Exports the currently visible kin entities to a CSV file in the layout
 * expected by the R pedigree/kinship packages (see the example at the
 * bottom of this class).
 */
public class ExportToR {

    private SessionStorage sessionStorage;
    private MessageDialogHandler dialogHandler;

    public ExportToR(SessionStorage sessionStorage, MessageDialogHandler dialogHandler) {
        this.sessionStorage = sessionStorage;
        this.dialogHandler = dialogHandler;
    }

    /**
     * Writes the visible entities of the current graph to
     * {@code destinationFile}, overwriting it, and reports the outcome via
     * the dialog handler.  I/O failures are logged, not rethrown.
     */
    public void doExport(Component mainFrame, SavePanel savePanel, File destinationFile) {
        PedigreePackageExport packageExport = new PedigreePackageExport();
        try {
            // Collect only the entities currently shown in the diagram.
            final EntityData[] dataNodes = savePanel.getGraphPanel().dataStoreSvg.graphData.getDataNodes();
            ArrayList<EntityData> visibleEntities = new ArrayList<EntityData>();
            for (EntityData currentEntity : dataNodes) {
                if (currentEntity.isVisible) {
                    visibleEntities.add(currentEntity);
                }
            }
            FileWriter fileWriter = new FileWriter(destinationFile, false);
            try {
                fileWriter.write(packageExport.createCsvContents(visibleEntities.toArray(new EntityData[]{})));
            } finally {
                // Fix: the original leaked the writer if write() threw.
                fileWriter.close();
            }
            dialogHandler.addMessageDialogToQueue("Exported " + visibleEntities.size() + " entities", "Export");
            // ArbilWindowManager.getSingleInstance().addMessageDialogToQueue("File saved", "Export");
        } catch (IOException exception) {
            dialogHandler.addMessageDialogToQueue("Error, could not export the data to file", "Export");
            BugCatcherManager.getBugCatcher().logError(exception);
        }
    }
    // example usage:
    // install.packages()
    // pid id momid dadid sex affected
    // 24 1 0 0 1 1
    // 24 2 0 0 2 1
    // 24 3 1 2 1 2
    // 24 4 0 0 2 2
    // 24 5 3 4 1 3
    //
    // dataFrame <- read.table("~/kinship-r.csv",header=T)
    // library(kinship)
    // attach(dataFrame)
    // pedigreeObj <- pedigree(id,momid,dadid,sex, affected)
    // plot(pedigreeObj)
    // Suggestion from Dan on R package to look into in detail
}
|
package org.jivesoftware.wildfire.filetransfer.spi;
import org.jivesoftware.util.CacheSizes;
import org.jivesoftware.wildfire.filetransfer.ProxyTransfer;
import java.net.Socket;
import java.util.concurrent.Future;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.DataOutputStream;
/**
 * Tracks the different connections related to a file transfer. There are two connections, the
 * initiator and the target and when both connections are completed the transfer can begin.
 */
public class DefaultProxyTransfer implements ProxyTransfer {

    // NOTE(review): static (shared by ALL transfers) and never read in the
    // code visible here — looks like dead state superseded by the per-instance
    // amountWritten below; confirm before removing.
    private static long amountTransfered;

    private String initiator;
    private Socket initiatorSocket;
    private Socket targetSocket;
    private String target;
    private String transferDigest;
    private String streamID;
    // Future of the running transfer; may only be set once (see setTransferFuture).
    private Future<?> future;
    // Bytes copied from the initiator to the target so far.
    private long amountWritten;

    // Copy buffer size in bytes used by doTransfer().
    private static final int BUFFER_SIZE = 8000;

    public DefaultProxyTransfer() { }
    /** Returns the fully qualified JID of the initiator of the file transfer. */
    public String getInitiator() {
        return initiator;
    }

    /**
     * Sets the fully qualified JID of the initiator of the file transfer.
     *
     * @param initiator The fully qualified JID of the initiator of the file transfer.
     */
    public void setInitiator(String initiator) {
        this.initiator = initiator;
    }

    public Socket getInitiatorSocket() {
        return initiatorSocket;
    }

    public void setInitiatorSocket(Socket initiatorSocket) {
        this.initiatorSocket = initiatorSocket;
    }

    public Socket getTargetSocket() {
        return targetSocket;
    }

    public void setTargetSocket(Socket targetSocket) {
        this.targetSocket = targetSocket;
    }

    /** Returns the fully qualified JID of the target of the file transfer. */
    public String getTarget() {
        return target;
    }

    public void setTarget(String target) {
        this.target = target;
    }

    public String getTransferDigest() {
        return transferDigest;
    }

    public void setTransferDigest(String transferDigest) {
        this.transferDigest = transferDigest;
    }

    public String getSessionID() {
        return streamID;
    }

    public void setSessionID(String streamID) {
        this.streamID = streamID;
    }

    /** True once both the initiator and target sockets are connected. */
    public boolean isActivatable() {
        return ((initiatorSocket != null) && (targetSocket != null));
    }

    /**
     * Records the future of the running transfer.  May only be called once
     * per transfer.
     *
     * @throws IllegalStateException if a future was already set
     */
    public synchronized void setTransferFuture(Future<?> future) {
        if(this.future != null) {
            throw new IllegalStateException("Transfer is already in progress, or has completed.");
        }
        this.future = future;
    }

    /** Returns the number of bytes written to the target so far. */
    public long getAmountTransfered() {
        return amountWritten;
    }
public void doTransfer() throws IOException {
if(!isActivatable()) {
throw new IOException("Transfer missing party");
}
InputStream in = getInitiatorSocket().getInputStream();
OutputStream out = new ProxyOutputStream(getTargetSocket().getOutputStream());
final byte[] b = new byte[BUFFER_SIZE];
int count = 0;
amountWritten = 0;
do {
// write to the output stream
out.write(b, 0, count);
amountWritten += count;
// read more bytes from the input stream
count = in.read(b);
} while (count >= 0);
getInitiatorSocket().close();
getTargetSocket().close();
}
public int getCachedSize() {
// Approximate the size of the object in bytes by calculating the size
// of each field.
int size = 0;
size += CacheSizes.sizeOfObject(); // overhead of object
size += CacheSizes.sizeOfString(initiator);
size += CacheSizes.sizeOfString(target);
size += CacheSizes.sizeOfString(transferDigest);
size += CacheSizes.sizeOfString(streamID);
size += CacheSizes.sizeOfLong(); // Amount written
size += CacheSizes.sizeOfObject(); // Initiatior Socket
size += CacheSizes.sizeOfObject(); // Target socket
size += CacheSizes.sizeOfObject(); // Future
return size;
}
static class ProxyOutputStream extends DataOutputStream {
public ProxyOutputStream(OutputStream out) {
super(out);
}
public synchronized void write(byte b[], int off, int len) throws IOException {
super.write(b, off, len);
amountTransfered += len;
}
}
}
|
package foam.core;
import foam.dao.pg.IndexedPreparedStatement;
import foam.nanos.logger.Logger;
import java.lang.UnsupportedOperationException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
 * PropertyInfo base class for array-valued properties. Provides
 * string/CSV-cell serialization (with backslash escaping of commas) and XML
 * read/write for the nested array elements.
 */
public abstract class AbstractArrayPropertyInfo
  extends AbstractPropertyInfo
{

  /**
   * Parses a comma-separated string (the inverse of setStatementValue) into a
   * String array and stores it on the object. A backslash escapes the next
   * character: "\\" encodes a literal backslash and "\," a literal comma.
   * A null input stores null.
   */
  @Override
  public void setFromString(Object obj, String value) {
    if ( value == null ) {
      this.set(obj, null);
      return;
    }
    // TODO: TO REUSE THIS LIST WITH A THREADLOCAL FOR BETTER PERFORMANCE
    List<String> list = new LinkedList<String>();
    StringBuilder sb = new StringBuilder();
    // prev tracks the previous character; '$' marks "no pending escape".
    char prev = '$';
    char[] cs = value.toCharArray();
    for ( int i = 0 ; i < cs.length ; i++ ) {
      if ( cs[i] == '\\' ) {
        if ( prev == '\\' ) {
          // "\\" collapses to a single literal backslash.
          sb.append("\\");
          prev = '$';
        } else {
          // Hold the backslash; meaning depends on the next character.
          prev = '\\';
        }
      } else if ( cs[i] == ',' ) {
        if ( prev == '\\' ) {
          // Escaped comma is literal text, not a separator.
          sb.append(',');
        } else {
          // Unescaped comma terminates the current element.
          list.add(sb.toString());
          sb.setLength(0);
        }
        prev = '$';
      } else {
        sb.append(cs[i]);
        prev = cs[i];
      }
    }
    // Flush the trailing element (there is no terminating comma).
    list.add(sb.toString());
    String[] result = new String[list.size()];
    //add support for other array types
    this.set(obj, list.toArray(result));
  }

  public abstract String of();

  // NESTED ARRAY
  /**
   * Reads the array elements for this property from the XML stream. Returns
   * the collected elements when the property's closing tag is reached; on a
   * premature end of document the partial result is returned after logging.
   */
  @Override
  public Object fromXML(X x, XMLStreamReader reader) {
    List objList = new ArrayList();
    String startTag = reader.getLocalName();
    try {
      int eventType;
      while ( reader.hasNext() ) {
        eventType = reader.next();
        switch ( eventType ) {
          case XMLStreamConstants.START_ELEMENT:
            if ( reader.getLocalName().equals("value") ) {
              // TODO: TYPE CASTING FOR PROPER CONVERSION. NEED FURTHER SUPPORT FOR PRIMITIVE TYPES
              throw new UnsupportedOperationException("Primitive typed array XML reading is not supported yet");
            }
            break;
          case XMLStreamConstants.END_ELEMENT:
            // Fixed: was 'reader.getLocalName() == startTag', which compares
            // string identity and only matched when the parser happened to
            // return an interned/same instance; otherwise the loop consumed
            // the rest of the document.
            if ( reader.getLocalName().equals(startTag) ) { return objList.toArray(); }
        }
      }
    } catch (XMLStreamException ex) {
      Logger logger = (Logger) x.get("logger");
      logger.error("Premature end of XML file");
    }
    return objList.toArray();
  }

  /**
   * Writes this property's array as a sequence of <value> child elements,
   * one per array entry, using each entry's toString(). Null property values
   * emit nothing.
   */
  @Override
  public void toXML (FObject obj, Document doc, Element objElement) {
    if ( this.f(obj) == null ) return;
    Element prop = doc.createElement(this.getName());
    objElement.appendChild(prop);
    Object[] nestObj = (Object[]) this.f(obj);
    for ( int k = 0; k < nestObj.length; k++ ) {
      Element nestedProp = doc.createElement("value");
      nestedProp.appendChild(doc.createTextNode(nestObj[k].toString()));
      prop.appendChild(nestedProp);
    }
  }

  /**
   * Binds this property to a prepared statement as a single comma-separated
   * string; commas and backslashes inside elements are escaped so
   * setFromString can round-trip the value. Null or empty arrays bind NULL.
   */
  @Override
  public void setStatementValue(IndexedPreparedStatement stmt, FObject o) throws java.sql.SQLException {
    Object obj = this.get(o);
    if ( obj == null ) {
      stmt.setObject(null);
      return;
    }
    Object[] os = (Object[]) obj;
    StringBuilder sb = new StringBuilder();
    int length = os.length;
    if ( length == 0 ) {
      stmt.setObject(null);
      return;
    }
    for ( int i = 0 ; i < length ; i++ ) {
      if ( os[i] == null ) {
        // Null elements serialize as an empty string.
        sb.append("");
      } else {
        escapeCommasAndAppend(sb, os[i]);
      }
      if ( i < length - 1 ) {
        sb.append(",");
      }
    }
    stmt.setObject(sb.toString());
  }

  @Override
  public void setFromResultSet(java.sql.ResultSet resultSet, int index, FObject o) throws java.sql.SQLException {
    String value = (String) resultSet.getObject(index);
    setFromString(o, value);
  }

  /** Appends o.toString() with backslashes and commas backslash-escaped. */
  private void escapeCommasAndAppend(StringBuilder builder, Object o) {
    String s = o.toString();
    //replace backslash to double backslash
    s = s.replace("\\", "\\\\");
    //replace comma to backslash+comma
    s = s.replace(",", "\\,");
    builder.append(s);
  }
}
|
package org.voltdb.utils;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
import org.voltdb.CLIConfig;
import org.voltdb.VoltTable;
import org.voltdb.VoltType;
import org.voltdb.client.Client;
import org.voltdb.client.ClientConfig;
import org.voltdb.client.ClientFactory;
import org.voltdb.client.ClientResponse;
import org.voltdb.client.ProcedureCallback;
import au.com.bytecode.opencsv_voltpatches.CSVParser;
import au.com.bytecode.opencsv_voltpatches.CSVReader;
/**
* CSVLoader is a simple utility to load data from a CSV formatted file to a
* table (or pass it to any stored proc, but ignoring any result other than the
* success code.).
*/
public class CSVLoader {
private static final AtomicLong inCount = new AtomicLong(0);
private static final AtomicLong outCount = new AtomicLong(0);
private static final int reportEveryNRows = 10000;
private static final int waitSeconds = 10;
private static CSVConfig config = null;
private static long latency = -1;
private static boolean standin = false;
public static String pathInvalidrowfile = "";
public static String pathReportfile = "csvloaderReport.log";
public static String pathLogfile = "csvloaderLog.log";
private static BufferedWriter out_invaliderowfile;
private static BufferedWriter out_logfile;
private static BufferedWriter out_reportfile;
private static String insertProcedure = "";
private static Map<Long, String[]> errorInfo = new TreeMap<Long, String[]>();
private static CSVReader csvReader;
private static Client csvClient;
private static ArrayList<String> firstIds = new ArrayList<String>();
private static final class MyCallback implements ProcedureCallback {
private final long m_lineNum;
private final CSVConfig m_config;
private final String m_rowdata;
MyCallback(long lineNumber, CSVConfig cfg, String rowdata) {
m_lineNum = lineNumber;
m_config = cfg;
m_rowdata = rowdata;
}
@Override
public void clientCallback(ClientResponse response) throws Exception {
if (response.getStatus() != ClientResponse.SUCCESS) {
System.err.println(response.getStatusString());
synchronized (errorInfo) {
if (!errorInfo.containsKey(m_lineNum)) {
String[] info = { m_rowdata, response.getStatusString() };
errorInfo.put(m_lineNum, info);
}
if (errorInfo.size() >= m_config.maxerrors) {
System.err
.println("The number of Failure row data exceeds "
+ m_config.maxerrors);
close_cleanup();
System.exit(1);
}
}
return;
}
long currentCount = inCount.incrementAndGet();
if (currentCount % reportEveryNRows == 0) {
System.out.println("Inserted " + currentCount + " rows");
}
}
}
private static class CSVConfig extends CLIConfig {
@Option(shortOpt = "f", desc = "directory path to produce report files.")
String file = "";
@Option(shortOpt = "p", desc = "procedure name to insert the data into the database.")
String procedure = "";
@Option(desc = "maximum rows to be read of the csv file.")
int limitrows = Integer.MAX_VALUE;
@Option(shortOpt = "r", desc = "directory path to produce report files.")
String reportdir = "./";
@Option(desc = "stop the process after NUMBER confirmed failures. The actual number of failures may be much higher.")
int maxerrors = 100;
@Option(desc = "the delimiter to use for separating entries.")
char separator = CSVParser.DEFAULT_SEPARATOR;
@Option(desc = "the character to use for quoted elements.")
char quotechar = CSVParser.DEFAULT_QUOTE_CHARACTER;
@Option(desc = "the character to use for escaping a separator or quote.")
char escape = CSVParser.DEFAULT_ESCAPE_CHARACTER;
@Option(desc = "sets if characters outside the quotes are ignored.", hasArg = false)
boolean strictquotes = CSVParser.DEFAULT_STRICT_QUOTES;
@Option(desc = "the line number to skip for start reading.")
int skip = CSVReader.DEFAULT_SKIP_LINES;
@Option(desc = "the default leading whitespace behavior to use if none is supplied.", hasArg = false)
boolean nowhitespace = !CSVParser.DEFAULT_IGNORE_LEADING_WHITESPACE;
@Option(desc = "the servers to be connected")
String servers = "localhost";
@Option(shortOpt = "u", desc = "user name that is used to connect to the servers,by defalut null")
String user = "";
@Option(shortOpt = "pw", desc = "password for this user to use to connect the servers,by defalut null")
String password = "";
@Option(desc = "port to be used for the servers right now")
int port = Client.VOLTDB_SERVER_PORT;
@AdditionalArgs(desc = "insert the data into database by TABLENAME.INSERT procedure by default.")
String table = "";
@Override
public void validate() {
if (maxerrors < 0)
exitWithMessageAndUsage("abortfailurecount must be >=0");
if (procedure.equals("") && table.equals(""))
exitWithMessageAndUsage("procedure name or a table name required");
if (!procedure.equals("") && !table.equals(""))
exitWithMessageAndUsage("Only a procedure name or a table name required, pass only one please");
if (skip < 0)
exitWithMessageAndUsage("skipline must be >= 0");
if (limitrows > Integer.MAX_VALUE)
exitWithMessageAndUsage("limitrows to read must be < "
+ Integer.MAX_VALUE);
if (port < 0)
exitWithMessageAndUsage("port number must be >= 0");
}
@Override
public void printUsage() {
System.out
.println("Semantics: csvloader [args] tablename or csvloader [args] -p procedurename");
super.printUsage();
}
}
public static void main(String[] args) throws IOException,
InterruptedException {
long start = System.currentTimeMillis();
int waits = 0;
int shortWaits = 0;
CSVConfig cfg = new CSVConfig();
cfg.parse(CSVLoader.class.getName(), args);
config = cfg;
configuration();
try {
if (CSVLoader.standin)
csvReader = new CSVReader(new BufferedReader(
new InputStreamReader(System.in)), config.separator,
config.quotechar, config.escape, config.skip,
config.strictquotes, config.nowhitespace);
else
csvReader = new CSVReader(new FileReader(config.file),
config.separator, config.quotechar, config.escape,
config.skip, config.strictquotes, config.nowhitespace);
} catch (FileNotFoundException e) {
System.err.println("CSV file '" + config.file
+ "' could not be found.");
System.exit(1);
}
// Split server list
String[] serverlist = config.servers.split(",");
// Create connection
ClientConfig c_config = new ClientConfig(config.user, config.password);
c_config.setProcedureCallTimeout(0); // Set procedure all to infinite
// timeout, see ENG-2670
try {
csvClient = CSVLoader.getClient(c_config, serverlist, config.port);
} catch (Exception e) {
System.err.println("Error to connect to the servers:"
+ config.servers);
System.exit(1);
}
try {
ProcedureCallback cb = null;
boolean lastOK = true;
String line[] = null;
int columnCnt = 0;
VoltTable procInfo = null;
try {
procInfo = csvClient.callProcedure("@SystemCatalog",
"PROCEDURECOLUMNS").getResults()[0];
while (procInfo.advanceRow()) {
if (insertProcedure.matches((String) procInfo.get(
"PROCEDURE_NAME", VoltType.STRING)))
columnCnt++;
}
} catch (Exception e) {
e.printStackTrace();
}
while ((config.limitrows
&& (line = csvReader.readNext()) != null) {
outCount.incrementAndGet();
boolean queued = false;
while (queued == false) {
StringBuilder linedata = new StringBuilder();
for (int i = 0; i < line.length; i++) {
linedata.append(line[i]);
if (i != line.length - 1)
linedata.append(",");
}
String[] correctedLine = line;
cb = new MyCallback(outCount.get(), config,
linedata.toString());
String lineCheckResult;
if ((lineCheckResult = checkparams_trimspace(correctedLine,
columnCnt)) != null) {
synchronized (errorInfo) {
if (!errorInfo.containsKey(outCount.get())) {
String[] info = { linedata.toString(),
lineCheckResult };
errorInfo.put(outCount.get(), info);
}
if (errorInfo.size() >= config.maxerrors) {
System.err
.println("The number of Failure row data exceeds "
+ config.maxerrors);
close_cleanup();
System.exit(1);
}
}
break;
}
queued = csvClient.callProcedure(cb, insertProcedure,
(Object[]) correctedLine);
firstIds.add(correctedLine[0]);
if (queued == false) {
++waits;
if (lastOK == false) {
++shortWaits;
}
Thread.sleep(waitSeconds);
}
lastOK = queued;
}
}
csvClient.drain();
} catch (Exception e) {
e.printStackTrace();
}
System.out.println("Inserted " + outCount.get() + " and acknowledged "
+ inCount.get() + " rows (final)");
if (waits > 0) {
System.out.println("Waited " + waits + " times");
if (shortWaits > 0) {
System.out.println("Waited too briefly? " + shortWaits
+ " times");
}
}
latency = System.currentTimeMillis() - start;
System.out.println("CSVLoader elaspsed: " + latency / 1000F
+ " seconds");
produceFiles();
close_cleanup();
}
public static ArrayList<String> getFirstIds() {
return firstIds;
}
private static String checkparams_trimspace(String[] slot,
int columnCnt) {
if (slot.length == 1 && slot[0].equals("")) {
return "Error: blank line";
}
if (slot.length != columnCnt) {
return "Error: # of attributes do not match, # of attributes needed: "
+ columnCnt
+ "# of attributes inputed: "
+ slot.length;
}
for (int i = 0; i < slot.length; i++) {
// trim white space in this line.
slot[i] = slot[i].trim();
if ((slot[i]).equals("NULL") || slot[i].equals("\\N")
|| !config.strictquotes && slot[i].equals("\"\\N\""))
slot[i] = null;
}
return null;
}
private static void configuration() {
if (config.file.equals(""))
standin = true;
if (!config.table.equals("")) {
insertProcedure = config.table + ".insert";
} else {
insertProcedure = config.procedure;
}
if (!config.reportdir.endsWith("/"))
config.reportdir += "/";
try {
File dir = new File(config.reportdir);
if (!dir.exists()) {
dir.mkdirs();
}
} catch (Exception x) {
System.err.println(x.getMessage());
x.printStackTrace();
System.exit(1);
}
String myinsert = insertProcedure;
myinsert = myinsert.replaceAll("\\.", "_");
pathInvalidrowfile = config.reportdir + myinsert + "_"
+ "csvloaderinvalidrows.csv";
pathLogfile = config.reportdir + myinsert + "_" + "csvloaderLog.log";
pathReportfile = config.reportdir + myinsert + "_"
+ "csvloaderReport.log";
try {
out_invaliderowfile = new BufferedWriter(new FileWriter(
pathInvalidrowfile));
out_logfile = new BufferedWriter(new FileWriter(pathLogfile));
out_reportfile = new BufferedWriter(new FileWriter(pathReportfile));
} catch (IOException e) {
System.err.println(e.getMessage());
System.exit(1);
}
}
private static Client getClient(ClientConfig config, String[] servers,
int port) throws Exception {
final Client client = ClientFactory.createClient(config);
for (String server : servers)
client.createConnection(server.trim(), port);
return client;
}
private static void produceFiles() {
int bulkflush = 300; // by default right now
try {
long linect = 0;
for (Long irow : errorInfo.keySet()) {
String info[] = errorInfo.get(irow);
if (info.length != 2)
System.out
.println("internal error, infomation is not enough");
linect++;
out_invaliderowfile.write(info[0] + "\n");
String message = "invalid line " + irow + ": " + info[0]
+ "\n";
System.err.print(message);
out_logfile.write(message + info[1] + "\n");
if (linect % bulkflush == 0) {
out_invaliderowfile.flush();
out_logfile.flush();
}
}
// Get elapsed time in seconds
float elapsedTimeSec = latency / 1000F;
out_reportfile.write("CSVLoader elaspsed: " + elapsedTimeSec
+ " seconds\n");
out_reportfile.write("Number of tuples tring to insert:"
+ outCount.get() + "\n");
out_reportfile.write("Number of failed tuples:" + errorInfo.size()
+ "\n");
out_reportfile.write("Number of acknowledged tuples: "
+ inCount.get() + "\n");
out_reportfile.write("CSVLoader rate: " + outCount.get()
/ elapsedTimeSec + " row/s\n");
System.out.println("invalid row file is generated to:"
+ pathInvalidrowfile + "\n" + "log file is generated to:"
+ pathLogfile + "\n" + "report file is generated to:"
+ pathReportfile);
out_invaliderowfile.flush();
out_logfile.flush();
out_reportfile.flush();
} catch (FileNotFoundException e) {
System.err.println("CSV report directory '" + config.reportdir
+ "' does not exist.");
} catch (Exception x) {
System.err.println(x.getMessage());
}
}
private static void close_cleanup() throws IOException,
InterruptedException {
inCount.set(0);
outCount.set(0);
errorInfo.clear();
csvReader.close();
csvClient.close();
out_invaliderowfile.close();
out_logfile.close();
out_reportfile.close();
}
}
|
package ca.corefacility.bioinformatics.irida.ria.utilities;
import java.text.SimpleDateFormat;
/**
 * Shared string-formatting constants (e.g. dates) for the web UI.
 *
 * @author Josh Adam <josh.adam@phac-aspc.gc.ca>
 */
public interface Formats {
	/**
	 * Format applied to {@link java.util.Date} values rendered in DataTables
	 * (pattern {@code "dd MMM yyyy"}).
	 * <p>
	 * NOTE(review): {@link SimpleDateFormat} is not thread-safe, and this
	 * single instance is shared via a constant — confirm callers format on
	 * one thread or synchronize externally.
	 */
	SimpleDateFormat DATE = new SimpleDateFormat("dd MMM yyyy");
}
|
package com.akiban.server.types3.mcompat.mtypes;
import com.akiban.server.types3.common.BigDecimalWrapper;
import java.math.BigDecimal;
import java.math.MathContext;
import java.math.RoundingMode;
/**
 * Mutable wrapper around a {@link BigDecimal}. Every arithmetic operation
 * replaces the wrapped value in place and returns this wrapper so calls can
 * be chained.
 */
public class MBigDecimalWrapper implements BigDecimalWrapper {

    public static final MBigDecimalWrapper ZERO = new MBigDecimalWrapper(BigDecimal.ZERO);

    // The wrapped value; reassigned by every mutating operation.
    private BigDecimal decimal;

    public MBigDecimalWrapper(BigDecimal value) {
        this.decimal = value;
    }

    public MBigDecimalWrapper(String num) {
        this(new BigDecimal(num));
    }

    public MBigDecimalWrapper(long val) {
        this(BigDecimal.valueOf(val));
    }

    public MBigDecimalWrapper() {
        this(BigDecimal.ZERO);
    }

    @Override
    public void reset() {
        decimal = BigDecimal.ZERO;
    }

    @Override
    public BigDecimalWrapper add(BigDecimalWrapper other) {
        decimal = decimal.add(other.asBigDecimal());
        return this;
    }

    @Override
    public BigDecimalWrapper subtract(BigDecimalWrapper other) {
        decimal = decimal.subtract(other.asBigDecimal());
        return this;
    }

    @Override
    public BigDecimalWrapper multiply(BigDecimalWrapper other) {
        decimal = decimal.multiply(other.asBigDecimal());
        return this;
    }

    /** Exact division; throws ArithmeticException for non-terminating results. */
    @Override
    public BigDecimalWrapper divide(BigDecimalWrapper other) {
        decimal = decimal.divide(other.asBigDecimal());
        return this;
    }

    /** Rounds up to the nearest integer (scale 0, CEILING). */
    @Override
    public BigDecimalWrapper ceil() {
        decimal = decimal.setScale(0, RoundingMode.CEILING);
        return this;
    }

    /** Rounds down to the nearest integer (scale 0, FLOOR). */
    @Override
    public BigDecimalWrapper floor() {
        decimal = decimal.setScale(0, RoundingMode.FLOOR);
        return this;
    }

    /** Truncates toward zero at the given scale. */
    @Override
    public BigDecimalWrapper truncate(int scale) {
        decimal = decimal.setScale(scale, RoundingMode.DOWN);
        return this;
    }

    /** Half-up rounding at the given scale. */
    @Override
    public BigDecimalWrapper round(int scale) {
        decimal = decimal.setScale(scale, RoundingMode.HALF_UP);
        return this;
    }

    @Override
    public int getSign() {
        return decimal.signum();
    }

    /** Division at a fixed scale, rounding away from zero. */
    @Override
    public BigDecimalWrapper divide(BigDecimalWrapper augend, int scale)
    {
        decimal = decimal.divide(augend.asBigDecimal(),
                                 scale,
                                 RoundingMode.UP);
        return this;
    }

    @Override
    public BigDecimalWrapper divideToIntegeralValue (BigDecimalWrapper augend)
    {
        decimal = decimal.divideToIntegralValue(augend.asBigDecimal());
        return this;
    }

    @Override
    public BigDecimalWrapper abs()
    {
        decimal = decimal.abs();
        return this;
    }

    @Override
    public int getScale()
    {
        return decimal.scale();
    }

    @Override
    public int getPrecision()
    {
        return decimal.precision();
    }

    @Override
    public BigDecimalWrapper parseString(String num)
    {
        decimal = new BigDecimal(num);
        return this;
    }

    @Override
    public int compareTo(BigDecimalWrapper o)
    {
        return decimal.compareTo(o.asBigDecimal());
    }

    /**
     * Rounds to the given precision with HALF_UP.
     * NOTE(review): the {@code scale} argument is not used here — confirm
     * whether callers expect it to be applied.
     */
    @Override
    public BigDecimalWrapper round(int precision, int scale)
    {
        decimal = decimal.round(new MathContext(precision, RoundingMode.HALF_UP));
        return this;
    }

    @Override
    public BigDecimalWrapper negate()
    {
        decimal = decimal.negate();
        return this;
    }

    @Override
    public BigDecimal asBigDecimal() {
        return decimal;
    }

    @Override
    public boolean isZero()
    {
        return decimal.signum() == 0;
    }

    /** Remainder (BigDecimal.remainder), which keeps the dividend's sign. */
    @Override
    public BigDecimalWrapper mod(BigDecimalWrapper num)
    {
        decimal = decimal.remainder(num.asBigDecimal());
        return this;
    }

    @Override
    public String toString() {
        if (decimal == null) {
            return "UNSET";
        }
        return decimal.toString();
    }
}
|
package com.antelink.sourcesquare.client.scan;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.TreeSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.antelink.sourcesquare.TreemapNode;
import com.antelink.sourcesquare.event.base.EventBus;
import com.antelink.sourcesquare.event.events.ErrorEvent;
import com.antelink.sourcesquare.event.events.FilesIdentifiedEvent;
import com.antelink.sourcesquare.event.events.HiddenFileFoundEvent;
import com.antelink.sourcesquare.event.events.ScanCompleteEvent;
import com.antelink.sourcesquare.event.events.StartScanEvent;
import com.antelink.sourcesquare.event.handlers.FilesIdentifiedEventHandler;
import com.antelink.sourcesquare.event.handlers.StartScanEventHandler;
import com.antelink.sourcesquare.results.TreeMapBuilder;
/**
 * Walks a filesystem tree, counts and collects the files to analyze, and
 * feeds them in growing batches to a small pool of ProcessWorkers. Progress
 * and results are reported through the shared EventBus; the treemap of the
 * scanned directory structure is built as a side effect of the walk.
 *
 * Worker hand-off uses wait/notify on a single shared lock object; the exact
 * ordering of the synchronized blocks below is load-bearing.
 */
public class SourceSquareFSWalker {

    private final static Log logger = LogFactory.getLog(SourceSquareFSWalker.class);

    // Batch size grows (doubling) up to this cap.
    private static final int MAX_FILE_PER_QUERY = 400;
    // A partial batch is dispatched once this many milliseconds have passed.
    private static final int COMPUTE_WAIT_TIME = 1000;

    private final EventBus eventBus;
    private final TreeMapBuilder treemap;
    // Current batch size; starts small and doubles after each dispatch.
    private int filePerQuery = 2;
    // Fixed pool of three workers sharing 'lock' for availability signaling.
    private final ArrayList<ProcessWorker> workers = new ArrayList<ProcessWorker>();

    /*contains number of folders per level*/
    private final ArrayList<Integer> levels;
    private final Object lock;
    // Running count of all (non-hidden) files seen by the walk.
    private long total = 0;

    public SourceSquareFSWalker(SourceSquareEngine engine, EventBus eventBus, TreeMapBuilder treemap) {
        this.eventBus = eventBus;
        this.levels = new ArrayList<Integer>();
        this.lock = new Object();
        this.treemap = treemap;
        this.workers.add(new ProcessWorker(0, engine, this.lock));
        this.workers.add(new ProcessWorker(1, engine, this.lock));
        this.workers.add(new ProcessWorker(2, engine, this.lock));
    }

    /**
     * Subscribes this walker to the event bus: a StartScanEvent triggers the
     * file identification pass, and the resulting FilesIdentifiedEvent
     * triggers the hashing/analysis pass.
     */
    public void bind() {
        this.eventBus.addHandler(StartScanEvent.TYPE, new StartScanEventHandler() {

            @Override
            public String getId() {
                return getClass().getCanonicalName() + ": " + StartScanEventHandler.class.getName();
            }

            @Override
            public void handle(File toScan) {
                logger.info("Counting files...");
                SourceSquareFSWalker.this.identifyFiles(toScan);
            }
        });
        this.eventBus.addHandler(FilesIdentifiedEvent.TYPE, new FilesIdentifiedEventHandler() {

            @Override
            public String getId() {
                // NOTE(review): uses StartScanEventHandler in the id even
                // though this is a FilesIdentifiedEventHandler — looks like a
                // copy-paste; confirm whether handler ids must be distinct.
                return getClass().getCanonicalName() + ": " + StartScanEventHandler.class.getName();
            }

            @Override
            public void handle(TreeSet<File> fileSet) {
                try {
                    logger.info("Start scan for " + fileSet.size() + " files");
                    ScanStatus.INSTANCE.start();
                    SourceSquareFSWalker.this.queryFiles(fileSet);
                } catch (Exception e) {
                    logger.debug("Error handling tree identification", e);
                }
            }
        });
    }

    /**
     * Hashes every file in the set (SHA-1) and dispatches the path->hash map
     * to the workers in batches. A batch is sent when it reaches the current
     * filePerQuery size or when COMPUTE_WAIT_TIME has elapsed; the batch size
     * doubles after each dispatch up to MAX_FILE_PER_QUERY. Blocks until all
     * workers are idle, then fires ScanCompleteEvent.
     *
     * @throws InterruptedException if interrupted while waiting for a worker.
     */
    public synchronized void queryFiles(TreeSet<File> fileSet) throws InterruptedException {
        HashMap<String, String> toAnalyze = new HashMap<String, String>();
        Iterator<File> iterator = fileSet.iterator();
        logger.debug(fileSet.size() + " files to analyze");
        long count = 0;
        long timer = System.currentTimeMillis();
        while (iterator.hasNext()) {
            File file = iterator.next();
            logger.trace("adding analyze file to the pool: " + file.getAbsolutePath());
            try {
                String sha1 = FileAnalyzer.calculateHash("SHA-1", file);
                toAnalyze.put(file.getAbsolutePath(), sha1);
                count++;
            } catch (Exception e) {
                // Unreadable/unhashable files are skipped, not fatal.
                logger.error("skipping files " + file, e);
            }
            if (toAnalyze.size() == this.filePerQuery
                    || System.currentTimeMillis() - timer > COMPUTE_WAIT_TIME) {
                // dispatch analysis
                timer = System.currentTimeMillis();
                analyzeMap(toAnalyze);
                this.filePerQuery = Math.min(MAX_FILE_PER_QUERY, this.filePerQuery * 2);
                logger.trace("new counter: " + count);
            }
        }
        // Flush the final partial batch.
        if (!toAnalyze.isEmpty()) {
            analyzeMap(toAnalyze);
        }
        // Wait until every worker reports available; workers notify 'lock'
        // when they finish a batch.
        while (!allProcessDone()) {
            synchronized (this.lock) {
                this.lock.wait();
            }
        }
        this.eventBus.fireEvent(new ScanCompleteEvent(this.levels));
        logger.info("Analysis done " + count);
    }

    /**
     * Hands a copy of the batch to the first available worker, blocking on
     * the shared lock until one frees up. The caller's map is cleared so it
     * can be reused for the next batch.
     */
    private synchronized void analyzeMap(HashMap<String, String> tempMap)
            throws InterruptedException {
        ProcessWorker worker = null;
        while ((worker = getAvailableProcessor()) == null) {
            synchronized (this.lock) {
                this.lock.wait();
            }
        }
        worker.process(new HashMap<String, String>(tempMap));
        tempMap.clear();
    }

    /**
     * Walks the directory, builds the treemap root, and fires
     * FilesIdentifiedEvent with every file found. An OutOfMemoryError (huge
     * trees) is converted into a user-facing ErrorEvent.
     */
    public void identifyFiles(File directory) {
        logger.debug("counting files for: " + directory.getAbsolutePath());
        TreeSet<File> fileSet = new TreeSet<File>();
        try {
            TreemapNode root = reccursiveIdentifyFiles(directory, fileSet, 0);
            this.treemap.setRoot(root);
            this.eventBus.fireEvent(new FilesIdentifiedEvent(fileSet));
        } catch (OutOfMemoryError e) {
            this.eventBus
                    .fireEvent(new ErrorEvent(
                            "Out of memory: Try again with a smaller directory\nor change your JVM parameters."));
        }
    }

    /**
     * Depth-first walk: collects files into fileSet, counts directories per
     * depth level (this.levels), accumulates the total file count into the
     * ScanStatus, and returns a treemap node for this directory (or null for
     * hidden/plain-file/unlistable entries).
     */
    private TreemapNode reccursiveIdentifyFiles(File directory, TreeSet<File> fileSet, int depth) {
        // Grow the per-level directory counters as we reach new depths.
        if (this.levels.size() < depth + 1) {
            this.levels.add(0);
        }
        logger.trace("Counting going down to directory : " + directory.getAbsolutePath());
        if (directory.isHidden()) {
            this.eventBus.fireEvent(new HiddenFileFoundEvent(directory));
            return null;
        }
        if (directory.isFile()) {
            fileSet.add(directory);
            return null;
        }
        if (!directory.isDirectory()) {
            return null;
        }
        // Protection if the directory forbids listing
        if (directory.listFiles() == null) {
            return null;
        }
        Set<TreemapNode> children = new HashSet<TreemapNode>();
        int nbFiles = 0;
        int nbDirs = 0;
        for (File child : directory.listFiles()) {
            // Hidden plain files are skipped here; hidden directories still
            // recurse (and fire HiddenFileFoundEvent at the top of the call).
            if (child.isDirectory() || !child.isHidden()) {
                if (child.isDirectory()) {
                    nbDirs++;
                    TreemapNode childNode = reccursiveIdentifyFiles(child, fileSet, depth + 1);
                    if (childNode != null) {
                        children.add(childNode);
                    }
                } else if (child.isFile()) {
                    nbFiles++;
                    fileSet.add(child);
                }
            }
        }
        this.total = this.total + nbFiles;
        ScanStatus.INSTANCE.setNbFilesToScan((int) this.total);
        this.levels.set(depth, this.levels.get(depth) + nbDirs);
        return this.treemap.createTreeMapNode(directory.getAbsolutePath(), children, nbFiles);
    }

    /** @return the first idle worker, or null when all three are busy. */
    private ProcessWorker getAvailableProcessor() {
        for (ProcessWorker worker : this.workers) {
            if (worker.isAvailable()) {
                return worker;
            }
        }
        return null;
    }

    /** @return true when every worker is idle (no batch in flight). */
    private boolean allProcessDone() {
        for (ProcessWorker worker : this.workers) {
            if (!worker.isAvailable()) {
                return false;
            }
        }
        return true;
    }
}
|
package com.blackberry.kafka.lowoverhead.consumer;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.zip.CRC32;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.blackberry.kafka.lowoverhead.Constants;
import com.blackberry.kafka.lowoverhead.compression.Decompressor;
import com.blackberry.kafka.lowoverhead.compression.GzipDecompressor;
import com.blackberry.kafka.lowoverhead.compression.SnappyDecompressor;
public class MessageSetReader {
private static final Logger LOG = LoggerFactory
.getLogger(MessageSetReader.class);
private boolean ready = false;
private byte[] bytes = new byte[0];
private ByteBuffer buffer = ByteBuffer.wrap(bytes);
// This starts at 1KiB, and doubles as necessary. I doubt it will need to do
// so often, unless message sizes keep growing in an unbounded way.
private byte[] decompressionBytes = new byte[1024];
private CRC32 crc32 = new CRC32();
private SnappyDecompressor snappyDecompressor = null;
private GzipDecompressor gzipDecompressor = null;
private MessageSetReader messageSetReader = null;
public void init(byte[] src, int position, int length) {
if (bytes.length < length) {
bytes = new byte[length];
buffer = ByteBuffer.wrap(bytes);
}
System.arraycopy(src, position, bytes, 0, length);
buffer.clear();
buffer.limit(length);
if (length > 0) {
ready = true;
} else {
ready = false;
}
}
private long offset;
private int messageSize;
private int crc;
private byte magicByte;
private byte attributes;
private byte compression;
private int keyLength;
private int valueLength;
private int bytesCopied;
private int decompressedSize;
public int getMessage(byte[] dest, int pos, int maxLength) throws IOException {
if (messageSetReader != null && messageSetReader.isReady()) {
bytesCopied = messageSetReader.getMessage(dest, pos, maxLength);
if (!messageSetReader.isReady() && !buffer.hasRemaining()) {
ready = false;
} else {
ready = true;
}
} else {
// There are occasional truncated messages. If we don't have enough,
// then
// return -1 and go not-ready
// This will cover the offset, message size and crc
if (buffer.remaining() < 8 + 4) {
ready = false;
return -1;
}
// offset
offset = buffer.getLong();
// messageSize
messageSize = buffer.getInt();
// This should be the last size check we need to do.
if (buffer.remaining() < messageSize) {
ready = false;
return -1;
}
// Crc => int32
crc = buffer.getInt();
// check that the crc is correct
crc32.reset();
crc32.update(bytes, buffer.position(), messageSize - 4);
if (crc != (int) crc32.getValue()) {
LOG.error("CRC value mismatch.");
ready = false;
return -1;
}
// MagicByte => int8
magicByte = buffer.get();
if (magicByte != Constants.MAGIC_BYTE) {
LOG.error("Incorrect magic byte.");
ready = false;
return -1;
}
// Attributes => int8
attributes = buffer.get();
compression = (byte) (attributes & Constants.COMPRESSION_MASK);
// Key => bytes
keyLength = buffer.getInt();
if (keyLength == -1) {
// null key
} else {
// ignore the key
buffer.position(buffer.position() + keyLength);
}
// Value => bytes
valueLength = buffer.getInt();
if (valueLength == -1) {
// null value. return -1, but we may still be ready.
if (!buffer.hasRemaining()) {
ready = false;
}
return -1;
}
if (compression == Constants.NO_COMPRESSION) {
bytesCopied = Math.min(maxLength, valueLength);
if (bytesCopied < valueLength) {
LOG.warn("Truncating message from {} to {} bytes.", valueLength,
maxLength);
}
System.arraycopy(bytes, buffer.position(), dest, pos, bytesCopied);
} else if (compression == Constants.SNAPPY) {
decompressedSize = decompress(getSnappyDecompressor());
ensureMessageSetReader();
messageSetReader.init(decompressionBytes, 0, decompressedSize);
if (messageSetReader.isReady()) {
bytesCopied = messageSetReader.getMessage(dest, pos, maxLength);
}
} else if (compression == Constants.GZIP) {
decompressedSize = decompress(getGzipDecompressor());
ensureMessageSetReader();
messageSetReader.init(decompressionBytes, 0, decompressedSize);
if (messageSetReader.isReady()) {
bytesCopied = messageSetReader.getMessage(dest, pos, maxLength);
}
}
buffer.position(buffer.position() + valueLength);
if ((messageSetReader == null || !messageSetReader.isReady())
&& !buffer.hasRemaining()) {
ready = false;
} else {
ready = true;
}
}
return bytesCopied;
}
private int decompress(Decompressor decompressor) throws IOException {
    // Decompress the current value into decompressionBytes, doubling the
    // output buffer and retrying whenever the decompressor either fills it
    // completely or signals -1 (output may have been clipped). This retry
    // path should be very rare in practice.
    for (;;) {
        decompressedSize = decompressor.decompress(bytes, buffer.position(),
            valueLength, decompressionBytes, 0, decompressionBytes.length);
        final boolean possiblyTruncated = decompressedSize == -1
            || decompressedSize == decompressionBytes.length;
        if (!possiblyTruncated) {
            // The buffer had room to spare, so decompressedSize is final.
            break;
        }
        decompressionBytes = new byte[2 * decompressionBytes.length];
    }
    return decompressedSize;
}
private void ensureMessageSetReader() {
    // Lazily create the nested reader used for compressed message sets.
    if (messageSetReader != null) {
        return;
    }
    messageSetReader = new MessageSetReader();
}
private SnappyDecompressor getSnappyDecompressor() {
    // Lazy singleton: allocate on first use, then reuse the same instance.
    if (snappyDecompressor != null) {
        return snappyDecompressor;
    }
    snappyDecompressor = new SnappyDecompressor();
    return snappyDecompressor;
}
private GzipDecompressor getGzipDecompressor() {
    // Lazy singleton: allocate on first use, then reuse the same instance.
    if (gzipDecompressor != null) {
        return gzipDecompressor;
    }
    gzipDecompressor = new GzipDecompressor();
    return gzipDecompressor;
}
/** Whether another message can currently be read from this reader. */
public boolean isReady() {
    return ready;
}
/**
 * Offset of the current message.
 * NOTE(review): the {@code offset} field is set outside this visible chunk —
 * presumably to the offset of the message most recently read; confirm.
 */
public long getOffset() {
    return offset;
}
/** Offset to fetch next: one past the current message's offset. */
public long getNextOffset() {
    return offset + 1;
}
}
|
package com.codebetyars.skyhussars.engine.physics;
import com.jme3.math.FastMath;
import com.jme3.math.Quaternion;
import com.jme3.math.Vector3f;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Simplified aerodynamic model of a symmetric airfoil for the SkyHussars
 * flight engine. Each tick it computes lift and induced drag from the local
 * airflow and exposes the resulting linear acceleration and torque about the
 * aircraft's center of gravity.
 *
 * NOTE(review): forces are accumulated into {@code linearAcceleration} /
 * {@code torque} under {@code synchronized(this)}; callers read them via the
 * synchronized accessors at the bottom.
 */
public class SymmetricAirfoil implements Airfoil {

    private final static Logger logger = LoggerFactory.getLogger(SymmetricAirfoil.class);

    /**
     * @param name           human-readable wing name (used only for logging)
     * @param cog            wing position relative to the aircraft's center of gravity
     * @param wingArea       lifting surface area
     * @param incidence      incidence angle in degrees (applied as a rotation about X)
     * @param aspectRatio    wing aspect ratio, used for induced drag
     * @param damper         whether this surface contributes roll damping
     * @param dehidralDegree dihedral angle in degrees (rotation about Z)
     * @param direction      surface role (e.g. horizontal/vertical stabilizer)
     */
    public SymmetricAirfoil(String name, Vector3f cog, float wingArea, float incidence, float aspectRatio, boolean damper, float dehidralDegree, Aileron.Direction direction) {
        this.wingArea = wingArea;
        this.incidence = incidence;// * FastMath.DEG_TO_RAD;
        this.cog = cog;
        this.name = name;
        this.aspectRatio = aspectRatio;
        // fromAngles mutates qIncidence in place and returns it; the negative
        // sign tilts the wing by the incidence angle about the X axis.
        this.qIncidence = qIncidence.fromAngles((-incidence) * FastMath.DEG_TO_RAD, 0, 0);
        this.dehidral = new Quaternion().fromAngleAxis(dehidralDegree * FastMath.DEG_TO_RAD, Vector3f.UNIT_Z);//vertical ? Vector3f.UNIT_X : Vector3f.UNIT_Y;
        wingRotation = qIncidence.mult(dehidral);
        logger.debug(name + " pointing to " + qIncidence.mult(dehidral).mult(Vector3f.UNIT_Y));
        this.damper = damper;
        if (damper) {
            // Surfaces mounted on the negative-X side of the fuselage damp as
            // "left" dampers (opposite sign to the right side).
            if (this.cog.dot(Vector3f.UNIT_X) < 0) {
                leftDamper = true;
            } else {
                leftDamper = false;
            }
        }
        this.direction = direction;
    }

    // Sample angles of attack in degrees and the matching lift coefficients;
    // getLiftCoefficient interpolates linearly between neighbouring samples.
    // NOTE(review): beyond 30 degrees the lookup falls through and yields 0 —
    // confirm that an abrupt stall to zero lift is intended.
    private final int[] constAoa = {0, 2, 4, 6, 8, 10, 15, 30};
    private final float[] clm05 = {0f, 0.246f, 0.475f, 0.68f, 0.775f, 0.795f, 0.82f, 0.8f};
    // private float[]
    private final float wingArea;
    private final float incidence;
    private final Vector3f cog;
    private final String name;
    private final float aspectRatio;
    private Quaternion qIncidence = new Quaternion();
    private final Quaternion dehidral;
    // Combined incidence + dihedral rotation; wing "up" is this applied to UNIT_Y.
    private final Quaternion wingRotation;
    private final float dampingFactor = 2f;
    private final boolean damper;
    private boolean leftDamper;
    private Aileron.Direction direction;

    @Override
    public Aileron.Direction direction(){return direction;}

    // Debug helper: reports whether the generated lift points with or against
    // the wing's up vector, plus force magnitudes.
    private void logging(Vector3f vLift, Vector3f vUp, float angleOfAttack, Vector3f vInducedDrag) {
        String direction = "up";
        if (vLift.normalize().dot(vUp) < 0) {
            direction = "down";
        }
        logger.debug(name + " at " + angleOfAttack + " degrees generated " + direction + "forces: vLift " + vLift.length() + ", induced drag " + vInducedDrag.length());
    }

    /**
     * Adds damping contributions (from angular velocity) to the airflow vector
     * so that rotation is resisted. Left/right dampers apply opposite signs;
     * stabilizers damp about their respective axes.
     */
    public Vector3f damp(Vector3f vFlow, Vector3f vAngularVelocity, Vector3f vUp) {
        float zDamping = vAngularVelocity.z * cog.length() * dampingFactor;
        float xDamping = vAngularVelocity.x * cog.length() * 1f;
        float yDamping = vAngularVelocity.y * cog.length() * 1f;
        if (damper && leftDamper) {
            vFlow = vFlow.add(vUp.mult(zDamping));
        } else if (damper && !leftDamper) {
            vFlow = vFlow.add(vUp.mult(zDamping).negate());
        }
        switch(direction){
            case HORIZONTAL_STABILIZER : vFlow = vFlow.add(vUp.mult(xDamping).negate()); break;
            case VERTICAL_STABILIZER : vFlow = vFlow.add(vUp.mult(yDamping).negate()); break;
        }
        return vFlow;
    }

    /**
     * Lift vector: magnitude from the standard lift equation, direction
     * perpendicular to the flow in the plane of flow and wing-up (flipped for
     * negative angle of attack).
     */
    public Vector3f lift(float airDensity, Vector3f vFlow, Vector3f vUp) {
        float aoa = aoa(vUp, vFlow.normalize());
        float scLift = calculateLift(aoa, airDensity, vFlow);
        Vector3f direction = vFlow.cross(vUp).cross(vFlow).normalize();
        if (aoa < 0) {
            direction = direction.negate();
        }
        return direction.mult(scLift);
    }

    /** Scalar lift: 0.5 * rho * Cl(|aoa|) * S * v^2. */
    public float calculateLift(float angleOfAttack, float airDensity, Vector3f vFlow) {
        //abs is used for symmetric wings? not perfect
        return 0.5f * airDensity * getLiftCoefficient(FastMath.abs(angleOfAttack)) * wingArea * vFlow.lengthSquared();
    }

    /**
     * Linear interpolation of the lift coefficient from the sample tables.
     * Returns 0 for angles at or beyond the last sample (30 degrees).
     */
    public float getLiftCoefficient(float angleOfAttack) {
        float liftCoefficient = 0f;
        for (int i = 1; i < constAoa.length; i++) {
            if (angleOfAttack < constAoa[i]) {
                /*let's approximate the real values with interpolation*/
                float diff = constAoa[i] - constAoa[i - 1];
                float real = angleOfAttack - constAoa[i - 1];
                float a = real / diff;
                float b = 1f - a;
                liftCoefficient = clm05[i] * a + clm05[i - 1] * b;
                break;
            }
        }
        return liftCoefficient;
    }

    /**
     * Induced drag along the flow direction: lift^2 / (0.5 * rho * AR * v^2 * pi * S).
     * NOTE(review): returns the shared constant Vector3f.ZERO when the divisor
     * is zero — callers must not mutate the returned vector.
     */
    public Vector3f inducedDrag(float airDensity, Vector3f vFlow, Vector3f vLift) {
        float dividened = (0.5f * airDensity * aspectRatio * vFlow.lengthSquared() * FastMath.PI * wingArea);
        //logger.debug("Airdensity: " + airDensity + ", Velocity: " + vVelocity.length() + ", lift: " + vLift.length() + );
        if (dividened == 0) {
            return Vector3f.ZERO;
        }
        float scInducedDrag = (vLift.lengthSquared()) / dividened;
        return vFlow.normalize().mult(scInducedDrag);
    }

    // Unimplemented; kept for interface/source compatibility.
    public float calculateInducedDrag(float airDensity, Vector3f vVelocity) {
        throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates.
    }

    @Override
    public Vector3f getCenterOfGravity() {
        return cog;
    }

    public String getName() {
        return name;
    }

    /**
     * Signed angle of attack in degrees between the flow and the wing plane;
     * negative when the flow comes from "above" the wing (vUp · vFlow < 0).
     */
    private float aoa(Vector3f vUp, Vector3f vFlow) {
        float angleOfAttack = vFlow.cross(vUp).cross(vFlow).normalize().angleBetween(vUp) * FastMath.RAD_TO_DEG;
        float np = vUp.dot(vFlow);
        if (np < 0) {
            angleOfAttack = -angleOfAttack;
        }
        return angleOfAttack;
    }

    // Results of the most recent tick(); read via the synchronized accessors.
    Vector3f linearAcceleration = Vector3f.ZERO;
    Vector3f torque = Vector3f.ZERO;

    /**
     * Per-frame update: damp the flow, compute lift and induced drag, and
     * store the combined force and its torque about the center of gravity.
     */
    @Override
    public Airfoil tick(float airDensity, Vector3f vFlow, Vector3f vAngularVelocity) {
        Vector3f vUp = wingRotation.mult(Vector3f.UNIT_Y).normalize();
        vFlow = damp(vFlow, vAngularVelocity, vUp);
        Vector3f lift = lift(airDensity, vFlow, vUp);
        Vector3f inducedDrag = inducedDrag(airDensity, vFlow, lift);
        synchronized(this) {
            linearAcceleration = lift.add(inducedDrag);
            torque = cog.cross(linearAcceleration);
        }
        return this;
    }

    @Override public synchronized Vector3f linearAcceleration() {return linearAcceleration;}
    @Override public synchronized Vector3f torque() {return torque;}
}
|
package com.comandante.creeper.server;
import com.comandante.creeper.Main;
import com.comandante.creeper.managers.GameManager;
import com.comandante.creeper.managers.SentryManager;
import com.google.common.base.Optional;
import org.apache.log4j.Logger;
import org.jboss.netty.channel.*;
import static com.comandante.creeper.server.Color.RESET;
public class CreeperAuthenticationHandler extends SimpleChannelUpstreamHandler {
private final GameManager gameManager;
private final CreeperAuthenticator creeperAuthenticator;
private static final Logger log = Logger.getLogger(CreeperAuthenticationHandler.class);
private static final String LOGO = " .-\"\"\"\"\"\"\"-.\n" +
" { _____}
" { / ( o\\\n" +
" { / \\ \\V\n" +
" { | _\\ \\.\n" +
" { | / '-' \\ HAPPY TURKEY DAY !!\n" +
" {___\\ /\\______/ __/ ~~~~~~~~~~~~~~~~~\n" +
" ~~/ / /____
" '
" '\\_ \\_\n" +
" _Dana'97_ /\\ /\\\n";
public CreeperAuthenticationHandler(GameManager gameManager) {
this.gameManager = gameManager;
this.creeperAuthenticator = new GameAuth(gameManager);
}
@Override
public void handleUpstream(ChannelHandlerContext ctx, ChannelEvent e) throws Exception {
if (e instanceof ChannelStateEvent) {
System.out.println("Upstream Handling: " + e);
}
super.handleUpstream(ctx, e);
}
@Override
public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder
.append(LOGO)
.append(RESET + "\r\n")
.append("First time here? Type \"tupac\".\r\n")
.append("username: ");
e.getChannel().write(stringBuilder.toString());
CreeperSession creeperSession = new CreeperSession();
creeperSession.setState(CreeperSession.State.promptedForUsername);
ctx.setAttachment(creeperSession);
}
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) throws Exception {
CreeperSession creeperSession = (CreeperSession) ctx.getAttachment();
if (!creeperSession.isAuthed()) {
if (creeperSession.state.equals(CreeperSession.State.newUserPromptedForUsername) || creeperSession.state.equals(CreeperSession.State.newUserPromptedForPassword)) {
gameManager.getNewUserRegistrationManager().handle(creeperSession, e);
if (!creeperSession.state.equals(CreeperSession.State.newUserRegCompleted)) {
return;
}
}
doAuthentication(ctx, e);
if (creeperSession.isAuthed()) {
gameManager.getPlayerManager().getSessionManager().putSession(creeperSession);
e.getChannel().getPipeline().remove(this);
e.getChannel().getPipeline().addLast("server_handler", new CreeperCommandHandler(gameManager));
e.getChannel().setAttachment(creeperSession);
gameManager.announceConnect(creeperSession.getUsername().get());
gameManager.currentRoomLogic(Main.createPlayerId(creeperSession.getUsername().get()));
gameManager.getChannelUtils().write(Main.createPlayerId(creeperSession.getUsername().get()), "\r\n" + gameManager.buildPrompt(Main.createPlayerId(creeperSession.getUsername().get())));
}
} else {
//gameManager.getPlayerManager().getSessionManager().putSession(creeperSession);
e.getChannel().getPipeline().addLast("server_handler", new CreeperCommandHandler(gameManager));
e.getChannel().getPipeline().remove(this);
e.getChannel().setAttachment(creeperSession);
}
super.messageReceived(ctx, e);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
log.error("Authentication failure.", e.getCause());
SentryManager.logSentry(this.getClass(), e.getCause(), "Authentication failure.");
}
private void doAuthentication(ChannelHandlerContext ctx, MessageEvent e) {
String message = (String) e.getMessage();
CreeperSession creeperSession = (CreeperSession) ctx.getAttachment();
if (creeperSession.getState().equals(CreeperSession.State.promptedForUsername)) {
creeperSession.setUsername(Optional.of(message.replaceAll("[^a-zA-Z0-9]", "")));
if (creeperSession.getUsername().isPresent() && creeperSession.getUsername().get().equals("tupac")) {
gameManager.getNewUserRegistrationManager().newUserRegistrationFlow(creeperSession, e);
return;
}
creeperSession.setState(CreeperSession.State.promptedForPassword);
e.getChannel().write("password: ");
return;
}
if (creeperSession.getState().equals(CreeperSession.State.promptedForPassword)) {
creeperSession.setPassword(Optional.of(message));
}
boolean b = creeperAuthenticator.authenticateAndRegisterPlayer(creeperSession.getUsername().get(), creeperSession.getPassword().get(), e.getChannel());
if (!b) {
e.getChannel().write("authentication failed.\r\n");
e.getChannel().write("username: ");
creeperSession.setState(CreeperSession.State.promptedForUsername);
} else {
creeperSession.setAuthed(true);
creeperSession.setState(CreeperSession.State.authed);
e.getChannel().write("Welcome to creeper. (version: " + Main.getCreeperVersion() + ")\r\n");
}
}
}
|
package com.continuuity.data.operation.ttqueue;
import com.continuuity.api.data.OperationException;
import com.continuuity.api.data.OperationResult;
import com.continuuity.common.conf.CConfiguration;
import com.continuuity.common.utils.ImmutablePair;
import com.continuuity.data.operation.StatusCode;
import com.continuuity.data.operation.executor.ReadPointer;
import com.continuuity.data.operation.executor.omid.TransactionOracle;
import com.continuuity.data.operation.ttqueue.internal.CachedList;
import com.continuuity.data.operation.ttqueue.internal.EntryMeta;
import com.continuuity.data.table.VersionedColumnarTable;
import com.google.common.base.Objects;
import com.google.common.collect.Lists;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicLong;
public class TTQueueNewOnVCTable implements TTQueue {
private static final Logger LOG = LoggerFactory.getLogger(TTQueueNewOnVCTable.class);
protected final VersionedColumnarTable table;
private final byte [] queueName;
final TransactionOracle oracle;
static final int MAX_CRASH_DEQUEUE_TRIES = 15;
// For testing
AtomicLong dequeueReturns = new AtomicLong(0);
/*
for each queue (global):
global entry id counter for newest (highest) entry id, incremented during enqueue
row-key | column | value
<queueName>10I | 10I | <entryId>
data and meta data (=entryState) for each entry (together in one row per entry)
(GLOBAL_DATA_PREFIX)
row-key | column | value
<queueName>20D<entryId> | 20D | <data>
| 10M | <entryState>
| 30H | <header data>
for each group of consumers (= each group of flowlet instances):
group read pointer for highest entry id processed by group of consumers
row-key | column | value
<queueName>10I<groupId> | 10I | <entryId>
for each consumer(=flowlet instance)
state of entry ids processed by consumer (one column per entry id), current active entry and consumer read pointer
(CONSUMER_META_PREFIX)
row-key | column | value
<queueName>30C<groupId><consumerId> | 10A | <entryId>
| 20C | <crash retries for active entry>
| 30I | <entryId>
*/
// Row prefix names and flags
static final byte [] GLOBAL_ENTRY_ID_PREFIX = {10, 'I'}; //row <queueName>10I
static final byte [] GLOBAL_DATA_PREFIX = {20, 'D'}; //row <queueName>20D
static final byte [] CONSUMER_META_PREFIX = {30, 'C'}; //row <queueName>30C
// Columns for row = GLOBAL_ENTRY_ID_PREFIX
static final byte [] GLOBAL_ENTRYID_COUNTER = {10, 'I'}; //newest (highest) entry id per queue (global)
// Columns for row = GLOBAL_DATA_PREFIX
static final byte [] ENTRY_META = {10, 'M'}; //row <queueName>20D<entryId>, column 10M
static final byte [] ENTRY_DATA = {20, 'D'}; //row <queueName>20D<entryId>, column 20D
static final byte [] ENTRY_HEADER = {30, 'H'}; //row <queueName>20D<entryId>, column 30H
static final byte [] GROUP_READ_POINTER = {10, 'I'}; //row <queueName>10I<groupId>, column 10I
// Columns for row = CONSUMER_META_PREFIX
static final byte [] ACTIVE_ENTRY = {10, 'A'}; //row <queueName>30C<groupId><consumerId>, column 10A
static final byte [] ACTIVE_ENTRY_CRASH_TRIES = {20, 'C'}; //row <queueName>30C<groupId><consumerId>, column 20C
static final byte [] CONSUMER_READ_POINTER = {30, 'I'}; //row <queueName>30C<groupId><consumerId>, column 30I
static final byte [] CLAIMED_ENTRY_BEGIN = {40, 'I'}; //row <queueName>30C<groupId><consumerId>, column 40I
static final byte [] CLAIMED_ENTRY_END = {50, 'I'}; //row <queueName>30C<groupId><consumerId>, column 50I
static final long INVALID_ENTRY_ID = -1;
static final long FIRST_QUEUE_ENTRY_ID = 1;
final long DEFAULT_BATCH_SIZE;
protected TTQueueNewOnVCTable(VersionedColumnarTable table, byte[] queueName, TransactionOracle oracle,
final CConfiguration conf) {
this.table = table;
this.queueName = queueName;
this.oracle = oracle;
this.DEFAULT_BATCH_SIZE = conf.getLong("ttqueue.batch.size.default", 100) > 0 ?
conf.getLong("ttqueue.batch.size.default", 100) : 100;
}
// Batch size for a dequeue: the consumer-supplied value when positive,
// otherwise the queue-wide default.
private long getBatchSize(QueueConfig queueConfig) {
    final long requested = queueConfig.getBatchSize();
    return requested > 0 ? requested : DEFAULT_BATCH_SIZE;
}
/**
 * Appends an entry to the queue.
 *
 * Claims a globally unique entry id via an atomic dirty increment, then
 * writes the entry's data, VALID meta state, and one header column per
 * partitioning key into the entry's data row at {@code cleanWriteVersion}.
 *
 * @return SUCCESS with a pointer to the newly written entry
 * @throws OperationException if the global id increment fails
 */
@Override
public EnqueueResult enqueue(QueueEntry entry, long cleanWriteVersion) throws OperationException {
    byte[] data = entry.getData();
    if (LOG.isTraceEnabled()) {
        logTrace("Enqueueing (data.len=" + data.length + ", writeVersion=" + cleanWriteVersion + ")");
    }
    // Get our unique entry id
    long entryId;
    try {
        // Make sure the increment below uses increment operation of the underlying implementation directly
        // so that it is atomic (Eg. HBase increment operation)
        entryId = this.table.incrementAtomicDirtily(makeRowName(GLOBAL_ENTRY_ID_PREFIX), GLOBAL_ENTRYID_COUNTER, 1);
    } catch (OperationException e) {
        throw new OperationException(StatusCode.INTERNAL_ERROR,
            String.format("Queue-%s: Increment of global entry id failed with status code %d : %s",
                Bytes.toString(queueName), e.getStatus(), e.getMessage()), e);
    }
    if (LOG.isTraceEnabled()) {
        logTrace("New enqueue got entry id " + entryId);
    }
    /*
    Insert entry with version=<cleanWriteVersion> and
    row-key = <queueName>20D<entryId> , column/value 20D/<data>, 10M/EntryState.VALID, 30H<partitionKey>/<hashValue>
    */
    // Two fixed columns (data + meta) plus one header column per partition key.
    final int size = entry.getPartitioningMap().size() + 2;
    byte[][] colKeys = new byte[size][];
    byte[][] colValues = new byte[size][];
    int colKeyIndex = 0;
    int colValueIndex = 0;
    colKeys[colKeyIndex++] = ENTRY_DATA;
    colKeys[colKeyIndex++] = ENTRY_META;
    colValues[colValueIndex++] = data;
    colValues[colValueIndex++] = new EntryMeta(EntryMeta.EntryState.VALID).getBytes();
    for(Map.Entry<String, Integer> e : entry.getPartitioningMap().entrySet()) {
        colKeys[colKeyIndex++] = makeColumnName(ENTRY_HEADER, e.getKey());
        colValues[colValueIndex++] = Bytes.toBytes(e.getValue());
    }
    this.table.put(makeRowKey(GLOBAL_DATA_PREFIX, entryId),
        colKeys,
        cleanWriteVersion,
        colValues);
    // Return success with pointer to entry
    return new EnqueueResult(EnqueueResult.EnqueueStatus.SUCCESS, new QueueEntryPointer(this.queueName, entryId));
}
/**
 * Marks an entry INVALID so consumers will skip it during dequeue. The data
 * and header columns are left in place; eviction cleans them up later.
 */
@Override
public void invalidate(QueueEntryPointer entryPointer, long cleanWriteVersion) throws OperationException {
    if(LOG.isTraceEnabled()) {
        // Fix: the format string had no placeholder, so the entry id argument
        // was silently dropped from the trace message.
        logTrace(String.format("Invalidating entry %d", entryPointer.getEntryId()));
    }
    final byte [] rowName = makeRowKey(GLOBAL_DATA_PREFIX, entryPointer.getEntryId());
    // Change meta data to INVALID
    this.table.put(rowName, ENTRY_META,
        cleanWriteVersion, new EntryMeta(EntryMeta.EntryState.INVALID).getBytes());
    // No need to delete data/headers since they will be cleaned up during eviction later
    if (LOG.isTraceEnabled()) {
        logTrace("Invalidated " + entryPointer);
    }
}
/**
 * Dequeues the next entry for the given consumer.
 *
 * Flow: (1) lazily reconstruct the consumer's queue state from storage,
 * (2) if an un-acked active entry exists, return that same entry again,
 * (3) otherwise refill the cached-entry window from storage if exhausted and
 * return the next cached entry, persisting the updated dequeue state; returns
 * EMPTY when no entry is available.
 */
@Override
public DequeueResult dequeue(QueueConsumer consumer, ReadPointer readPointer) throws OperationException {
    final QueueConfig config = consumer.getQueueConfig();
    if (LOG.isTraceEnabled()) {
        logTrace("Attempting dequeue [curNumDequeues=" + this.dequeueReturns.get() +
            "] (" + consumer + ", " + config + ", " + readPointer + ")");
    }
    // Determine what dequeue strategy to use based on the partitioner
    final DequeueStrategy dequeueStrategy = getDequeueStrategy(config.getPartitionerType().getPartitioner());
    // If QueueState is null, read the queue state from underlying storage.
    QueueStateImpl queueState = getQueueStateImpl(consumer.getQueueState());
    if(queueState == null) {
        queueState = dequeueStrategy.constructQueueState(consumer, config, readPointer);
        consumer.setQueueState(queueState);
    }
    // If the previous entry was not acked, return the same one (Note: will need to change for async mode)
    if(queueState.getActiveEntryId() != INVALID_ENTRY_ID) {
        if(!queueState.getCachedEntries().hasCurrent()) {
            throw new OperationException(StatusCode.INTERNAL_ERROR,
                String.format("Queue-%s: Cannot fetch active entry id from cached entries", Bytes.toString(queueName)));
        }
        QueueStateEntry cachedEntry = queueState.getCachedEntries().getCurrent();
        QueueEntry entry = new QueueEntry(cachedEntry.getData());
        dequeueStrategy.saveDequeueState(consumer, config, queueState, readPointer);
        DequeueResult dequeueResult = new DequeueResult(DequeueResult.DequeueStatus.SUCCESS,
            new QueueEntryPointer(this.queueName, cachedEntry.getEntryId()), entry);
        return dequeueResult;
    }
    // If no more cached entries, read entries from storage
    if(!queueState.getCachedEntries().hasNext()) {
        List<Long> entryIds = dequeueStrategy.fetchNextEntries(consumer, config, queueState, readPointer);
        readEntries(consumer, config, queueState, readPointer, entryIds);
    }
    if(queueState.getCachedEntries().hasNext()) {
        QueueStateEntry cachedEntry = queueState.getCachedEntries().getNext();
        this.dequeueReturns.incrementAndGet();
        // The returned entry becomes the active (dequeued-but-unacked) entry.
        queueState.setActiveEntryId(cachedEntry.getEntryId());
        queueState.setConsumerReadPointer(cachedEntry.getEntryId());
        QueueEntry entry = new QueueEntry(cachedEntry.getData());
        dequeueStrategy.saveDequeueState(consumer, config, queueState, readPointer);
        DequeueResult dequeueResult = new DequeueResult(DequeueResult.DequeueStatus.SUCCESS,
            new QueueEntryPointer(this.queueName, cachedEntry.getEntryId()), entry);
        return dequeueResult;
    } else {
        // No queue entries available to dequue, return queue empty
        if (LOG.isTraceEnabled()) {
            logTrace("End of queue reached using " + "read pointer " + readPointer);
        }
        dequeueStrategy.saveDequeueState(consumer, config, queueState, readPointer);
        DequeueResult dequeueResult = new DequeueResult(DequeueResult.DequeueStatus.EMPTY);
        return dequeueResult;
    }
}
// Maps a partitioner implementation to its matching dequeue strategy;
// fails with INTERNAL_ERROR for an unrecognized partitioner type.
private DequeueStrategy getDequeueStrategy(QueuePartitioner queuePartitioner) throws OperationException {
    if (queuePartitioner instanceof QueuePartitioner.HashPartitioner) {
        return new HashDequeueStrategy();
    }
    if (queuePartitioner instanceof QueuePartitioner.RoundRobinPartitioner) {
        return new RoundRobinDequeueStrategy();
    }
    if (queuePartitioner instanceof QueuePartitioner.FifoPartitioner) {
        return new FifoDequeueStrategy();
    }
    throw new OperationException(StatusCode.INTERNAL_ERROR,
        String.format("Queue-%s: Cannot figure out the dequeue strategy to use for partitioner %s",
            Bytes.toString(queueName), queuePartitioner.getClass()));
}
/**
 * Bulk-reads the given entry ids from storage into the queue state's cached
 * entry list. Invalid/evicted entries are skipped; if any row or its meta
 * column is missing, the cache is set empty and the method returns early.
 */
private void readEntries(QueueConsumer consumer, QueueConfig config, QueueStateImpl queueState, ReadPointer readPointer,
                         List<Long> entryIds) throws OperationException{
    if(LOG.isTraceEnabled()) {
        // Fix: the format string had no placeholder, so the entry id list
        // was silently dropped from the trace message.
        logTrace(String.format("Reading entries from storage - %s", Arrays.toString(entryIds.toArray())));
    }
    if(entryIds.isEmpty()) {
        queueState.setCachedEntries(CachedList.EMPTY_LIST);
        return;
    }
    final byte[][] entryRowKeys = new byte[entryIds.size()][];
    for(int i = 0; i < entryIds.size(); ++i) {
        entryRowKeys[i] = makeRowKey(GLOBAL_DATA_PREFIX, entryIds.get(i));
    }
    final byte[][] entryColKeys = new byte[][]{ ENTRY_META, ENTRY_DATA };
    OperationResult<Map<byte[], Map<byte[], byte[]>>> entriesResult =
        this.table.get(entryRowKeys, entryColKeys, readPointer);
    if(entriesResult.isEmpty()) {
        queueState.setCachedEntries(CachedList.EMPTY_LIST);
    } else {
        List<QueueStateEntry> entries = new ArrayList<QueueStateEntry>(entryIds.size());
        for(int i = 0; i < entryIds.size(); ++i) {
            Map<byte[], byte[]> entryMap = entriesResult.getValue().get(entryRowKeys[i]);
            if(entryMap == null) {
                queueState.setCachedEntries(CachedList.EMPTY_LIST);
                return;
            }
            byte[] entryMetaBytes = entryMap.get(ENTRY_META);
            if(entryMetaBytes == null) {
                queueState.setCachedEntries(CachedList.EMPTY_LIST);
                return;
            }
            EntryMeta entryMeta = EntryMeta.fromBytes(entryMetaBytes);
            if (LOG.isTraceEnabled()) {
                logTrace("entryId:" + entryIds.get(i) + ". entryMeta : " + entryMeta.toString());
            }
            // Check if entry has been invalidated or evicted
            if (entryMeta.isInvalid() || entryMeta.isEvicted()) {
                if (LOG.isTraceEnabled()) {
                    logTrace("Found invalidated or evicted entry at " + entryIds.get(i) +
                        " (" + entryMeta.toString() + ")");
                }
            } else {
                // Entry is visible and valid!
                assert(entryMeta.isValid());
                byte [] entryData = entryMap.get(ENTRY_DATA);
                entries.add(new QueueStateEntry(entryData, entryIds.get(i)));
            }
        }
        queueState.setCachedEntries(new CachedList<QueueStateEntry>(entries));
    }
}
/**
 * Acknowledges the consumer's active (dequeued-but-unacked) entry. Only the
 * current active entry may be acked; clears the active entry and persists the
 * updated state.
 *
 * @throws OperationException ILLEGAL_ACK when the pointer is not the active entry
 */
@Override
public void ack(QueueEntryPointer entryPointer, QueueConsumer consumer, ReadPointer readPointer)
    throws OperationException {
    // TODO: 1. Later when active entry can saved in memory, there is no need to write it into HBase
    // TODO: 2. Need to treat Ack as a simple write operation so that it can use a simple write rollback for unack
    // TODO: 3. Use Transaction.getWriteVersion instead ReadPointer
    QueuePartitioner partitioner = consumer.getQueueConfig().getPartitionerType().getPartitioner();
    final DequeueStrategy dequeueStrategy = getDequeueStrategy(partitioner);
    // Get queue state
    QueueStateImpl queueState = getQueueStateImpl(consumer.getQueueState());
    if(queueState == null) {
        queueState = dequeueStrategy.constructQueueState(consumer, consumer.getQueueConfig(), readPointer);
        consumer.setQueueState(queueState);
    }
    // Only the entry that has been dequeued (active entry) can be acked
    if(queueState.getActiveEntryId() != entryPointer.getEntryId()) {
        throw new OperationException(StatusCode.ILLEGAL_ACK, String.format(
            "Queue-%s: Entry %d is not the active entry. Current active entry is %d", Bytes.toString(queueName),
            entryPointer.getEntryId(), queueState.getActiveEntryId()));
    }
    // Set ack state
    queueState.setActiveEntryId(INVALID_ENTRY_ID);
    queueState.setActiveEntryTries(0);
    // Write ack state
    dequeueStrategy.saveDequeueState(consumer, consumer.getQueueConfig(), queueState, readPointer);
}
/**
 * Finalizes an acked entry across consumer groups. Currently a no-op stub;
 * eviction of finalized entries is not yet implemented (see TODO).
 */
@Override
public void finalize(QueueEntryPointer entryPointer, QueueConsumer consumer, int totalNumGroups, long writePoint)
    throws OperationException {
    // TODO: Evict queue entries
}
/**
 * Rolls back an ack: restores the given entry as the consumer's active entry
 * (with the retry counter reset to 0 — see TODO 3) and persists the state.
 */
@Override
public void unack(QueueEntryPointer entryPointer, QueueConsumer consumer, ReadPointer readPointer)
    throws OperationException {
    // TODO: 1. Later when active entry can saved in memory, there is no need to write it into HBase
    // TODO: 2. Need to treat Ack as a simple write operation so that it can use a simple write rollback for unack
    // TODO: 3. Ack gets rolled back with tries=0. Need to fix this by fixing point 2 above.
    QueuePartitioner partitioner = consumer.getQueueConfig().getPartitionerType().getPartitioner();
    final DequeueStrategy dequeueStrategy = getDequeueStrategy(partitioner);
    // Get queue state
    QueueStateImpl queueState = getQueueStateImpl(consumer.getQueueState());
    if(queueState == null) {
        queueState = dequeueStrategy.constructQueueState(consumer, consumer.getQueueConfig(), readPointer);
        consumer.setQueueState(queueState);
    }
    // Set unack state
    queueState.setActiveEntryId(entryPointer.getEntryId());
    queueState.setActiveEntryTries(0);
    // Write unack state
    dequeueStrategy.saveDequeueState(consumer, consumer.getQueueConfig(), queueState, readPointer);
}
// NOTE(review): plain static counter — not thread-safe (++ is not atomic) and
// resets to 0 on restart, so concurrent or restarted processes can hand out
// duplicate group ids. Replacing it with an atomic/durable source would change
// the package-visible field type, so it is only flagged here.
static long groupId = 0;
/** Returns a new consumer-group id. Placeholder implementation (see note). */
@Override
public long getGroupID() throws OperationException {
    // TODO: implement this :)
    return ++groupId;
}
/** Queue metadata/diagnostics. Not implemented yet; always returns null. */
@Override
public QueueAdmin.QueueInfo getQueueInfo() throws OperationException {
    // TODO: implement this :)
    return null;
}
// Narrows a QueueState to this implementation's QueueStateImpl. Null passes
// through as null; any other QueueState subtype is an internal error.
private QueueStateImpl getQueueStateImpl(QueueState queueState) throws OperationException {
    if (queueState == null) {
        return null;
    }
    if (queueState instanceof QueueStateImpl) {
        return (QueueStateImpl) queueState;
    }
    throw new OperationException(StatusCode.INTERNAL_ERROR,
        String.format("Queue-%s: Don't know how to use QueueState class %s", Bytes.toString(queueName), queueState.getClass()));
}
/** Current dirty read pointer paired with the dirty write version from the oracle. */
protected ImmutablePair<ReadPointer, Long> dirtyPointer() {
    return new ImmutablePair<ReadPointer,Long>(oracle.dirtyReadPointer(), oracle.dirtyWriteVersion());
}
/** Row key: queue name followed by the given prefix bytes. */
protected byte[] makeRowName(byte[] bytesToAppendToQueueName) {
    return Bytes.add(this.queueName, bytesToAppendToQueueName);
}
/** Row key: queue name + prefix + big-endian encoding of {@code id1}. */
protected byte[] makeRowKey(byte[] bytesToAppendToQueueName, long id1) {
    return Bytes.add(this.queueName, bytesToAppendToQueueName, Bytes.toBytes(id1));
}
/** Row key: queue name + prefix + encoded {@code id1} + encoded {@code id2} (e.g. groupId + consumerId). */
protected byte[] makeRowKey(byte[] bytesToAppendToQueueName, long id1, int id2) {
    return Bytes.add(
        Bytes.add(this.queueName, bytesToAppendToQueueName, Bytes.toBytes(id1)), Bytes.toBytes(id2));
}
/** Column name: prefix + big-endian encoding of the numeric id. */
protected byte[] makeColumnName(byte[] bytesToPrependToId, long id) {
    return Bytes.add(bytesToPrependToId, Bytes.toBytes(id));
}
/** Column name: prefix + UTF-8 bytes of the string id (used for header/partition keys). */
protected byte[] makeColumnName(byte[] bytesToPrependToId, String id) {
    return Bytes.add(bytesToPrependToId, Bytes.toBytes(id));
}
/** Trace-logs a message prefixed with this queue's name. */
protected void logTrace(String message) {
    LOG.trace(String.format("Queue-%s: %s", Bytes.toString(queueName), message));
}
/**
 * In-memory dequeue state for a single consumer: the active (dequeued but
 * unacked) entry and its crash-retry count, the consumer's read pointer, a
 * snapshot of the queue write pointer, the claimed entry range, and the
 * window of entries cached from storage.
 */
public static class QueueStateImpl implements QueueState {
    // Entry that has been dequeued but not yet acked; INVALID_ENTRY_ID when none.
    private long activeEntryId = INVALID_ENTRY_ID;
    // Crash-recovery retry count for the active entry.
    private int activeEntryTries = 0;
    // Highest entry id this consumer has read so far.
    private long consumerReadPointer = FIRST_QUEUE_ENTRY_ID - 1;
    // Snapshot of the queue's global write pointer. (Field renamed from the
    // original misspelling "queueWrtiePointer"; it is private, so callers
    // are unaffected.)
    private long queueWritePointer = FIRST_QUEUE_ENTRY_ID - 1;
    // Inclusive range of entries claimed by this consumer, if any.
    private long claimedEntryBegin = INVALID_ENTRY_ID;
    private long claimedEntryEnd = INVALID_ENTRY_ID;
    // Entries pre-fetched from storage, consumed via hasNext()/getNext().
    private CachedList<QueueStateEntry> cachedEntries;

    public QueueStateImpl() {
        cachedEntries = CachedList.emptyList();
    }

    public long getActiveEntryId() {
        return activeEntryId;
    }

    public void setActiveEntryId(long activeEntryId) {
        this.activeEntryId = activeEntryId;
    }

    public int getActiveEntryTries() {
        return activeEntryTries;
    }

    public void setActiveEntryTries(int activeEntryTries) {
        this.activeEntryTries = activeEntryTries;
    }

    public long getConsumerReadPointer() {
        return consumerReadPointer;
    }

    public void setConsumerReadPointer(long consumerReadPointer) {
        this.consumerReadPointer = consumerReadPointer;
    }

    public long getClaimedEntryBegin() {
        return claimedEntryBegin;
    }

    public void setClaimedEntryBegin(long claimedEntryBegin) {
        this.claimedEntryBegin = claimedEntryBegin;
    }

    public long getClaimedEntryEnd() {
        return claimedEntryEnd;
    }

    public void setClaimedEntryEnd(long claimedEntryEnd) {
        this.claimedEntryEnd = claimedEntryEnd;
    }

    public long getQueueWritePointer() {
        return queueWritePointer;
    }

    public void setQueueWritePointer(long queueWritePointer) {
        this.queueWritePointer = queueWritePointer;
    }

    public CachedList<QueueStateEntry> getCachedEntries() {
        return cachedEntries;
    }

    public void setCachedEntries(CachedList<QueueStateEntry> cachedEntries) {
        this.cachedEntries = cachedEntries;
    }

    @Override
    public String toString() {
        return Objects.toStringHelper(this)
            .add("activeEntryId", activeEntryId)
            .add("activeEntryTries", activeEntryTries)
            .add("consumerReadPointer", consumerReadPointer)
            .add("claimedEntryBegin", claimedEntryBegin)
            .add("claimedEntryEnd", claimedEntryEnd)
            .add("queueWritePointer", queueWritePointer)
            .add("cachedEntries", cachedEntries.toString())
            .toString();
    }
}
/**
 * Small helper for reading/writing a single consumer-state row: collects a
 * row key plus column names/values, then performs a batched get or put.
 *
 * NOTE(review): columnNames/columnValues are only ever appended to and never
 * cleared, so a reused instance accumulates columns across calls — confirm
 * callers either use fresh instances or intend re-sending prior columns.
 */
private static class QueueStateStore {
    private final VersionedColumnarTable table;
    private byte[] rowKey;
    private final List<byte[]> columnNames = Lists.newArrayList();
    private final List<byte[]> columnValues = Lists.newArrayList();
    // Result of the most recent read(); exposed via getReadResult().
    private OperationResult<Map<byte[], byte[]>> readResult;

    private QueueStateStore(VersionedColumnarTable table) {
        this.table = table;
    }

    public byte[] getRowKey() {
        return rowKey;
    }

    public void setRowKey(byte[] rowKey) {
        this.rowKey = rowKey;
    }

    public void addColumnName(byte[] columnName) {
        columnNames.add(columnName);
    }

    public void addColumnValue(byte[] columnValue) {
        columnValues.add(columnValue);
    }

    /** Fetches all registered columns of the row at the given read pointer. */
    public void read(ReadPointer readPointer)
        throws OperationException{
        final byte[][] colNamesByteArray = new byte[columnNames.size()][];
        readResult = table.get(rowKey, columnNames.toArray(colNamesByteArray), readPointer);
    }

    public OperationResult<Map<byte[], byte[]>> getReadResult() {
        return this.readResult;
    }

    /**
     * Writes all registered column/value pairs to the row.
     * NOTE(review): uses readPointer.getMaximum() as the write version —
     * presumably the transaction's write version; confirm against the oracle.
     */
    public void write(ReadPointer readPointer)
        throws OperationException {
        final byte[][] colNamesByteArray = new byte[columnNames.size()][];
        final byte[][] colValuesByteArray = new byte[columnValues.size()][];
        table.put(rowKey, columnNames.toArray(colNamesByteArray), readPointer.getMaximum(), columnValues.toArray(colValuesByteArray));
    }
}
/**
 * Partitioner-specific dequeue behavior: how consumer state is rebuilt from
 * storage, how the next batch of entry ids is selected, and how updated
 * dequeue state is persisted.
 */
interface DequeueStrategy {
    /** Rebuilds the consumer's queue state from the backing table. */
    QueueStateImpl constructQueueState(QueueConsumer consumer, QueueConfig config,
        ReadPointer readPointer) throws OperationException;
    /** Selects the next batch of entry ids this consumer should read. */
    List<Long> fetchNextEntries(QueueConsumer consumer, QueueConfig config, QueueStateImpl queueState,
        ReadPointer readPointer) throws OperationException;
    /** Persists the consumer's dequeue state back to the backing table. */
    void saveDequeueState(QueueConsumer consumer, QueueConfig config, QueueStateImpl queueState,
        ReadPointer readPointer) throws OperationException;
}
/**
 * Shared state-management logic for all dequeue strategies: reconstructing the
 * consumer's state from storage (including crash recovery of an unacked active
 * entry) and persisting it back. Subclasses supply fetchNextEntries().
 */
abstract class AbstractDequeueStrategy implements DequeueStrategy {
// Separate stores for reads and writes; both target the same consumer-meta row.
protected final QueueStateStore readQueueStateStore = new QueueStateStore(table);
protected final QueueStateStore writeQueueStateStore = new QueueStateStore(table);
@Override
public QueueStateImpl constructQueueState(QueueConsumer consumer, QueueConfig config, ReadPointer readPointer)
throws OperationException {
// ACTIVE_ENTRY contains the entry if any that is dequeued, but not acked
// CONSUMER_READ_POINTER + 1 points to the next entry that can be read by this queue consumer
readQueueStateStore.setRowKey(makeRowKey(CONSUMER_META_PREFIX, consumer.getGroupId(), consumer.getInstanceId()));
readQueueStateStore.addColumnName(ACTIVE_ENTRY);
readQueueStateStore.addColumnName(ACTIVE_ENTRY_CRASH_TRIES);
readQueueStateStore.addColumnName(CONSUMER_READ_POINTER);
readQueueStateStore.read(readPointer);
OperationResult<Map<byte[], byte[]>> stateBytes = readQueueStateStore.getReadResult();
QueueStateImpl queueState = new QueueStateImpl();
// Empty result means this consumer has no persisted state yet (first dequeue).
if(!stateBytes.isEmpty()) {
queueState.setActiveEntryId(Bytes.toLong(stateBytes.getValue().get(ACTIVE_ENTRY)));
queueState.setActiveEntryTries(Bytes.toInt(stateBytes.getValue().get(ACTIVE_ENTRY_CRASH_TRIES)));
byte[] consumerReadPointerBytes = stateBytes.getValue().get(CONSUMER_READ_POINTER);
if(consumerReadPointerBytes != null) {
queueState.setConsumerReadPointer(Bytes.toLong(consumerReadPointerBytes));
}
}
// Read queue write pointer
// TODO: use raw Get instead of the workaround of incrementing zero
long queueWritePointer = table.incrementAtomicDirtily(makeRowName(GLOBAL_ENTRY_ID_PREFIX), GLOBAL_ENTRYID_COUNTER, 0);
queueState.setQueueWritePointer(queueWritePointer);
// If active entry is present, read that from storage
// This is the crash recovery case, the consumer has stopped processing before acking the previous dequeue
if(queueState.getActiveEntryId() != INVALID_ENTRY_ID) {
if(queueState.getActiveEntryTries() < MAX_CRASH_DEQUEUE_TRIES) {
queueState.setActiveEntryTries(queueState.getActiveEntryTries() + 1);
readEntries(consumer, config, queueState, readPointer, Collections.singletonList(queueState.getActiveEntryId()));
// Set the active entry as the current entry
queueState.getCachedEntries().getNext();
} else {
// Give up after MAX_CRASH_DEQUEUE_TRIES redelivery attempts and move on.
// TODO: what do we do with the active entry?
if(LOG.isTraceEnabled()) {
logTrace(String.format("Ignoring dequeue of entry %d after %d tries", queueState.getActiveEntryId(), MAX_CRASH_DEQUEUE_TRIES));
}
queueState.setActiveEntryId(INVALID_ENTRY_ID);
queueState.setActiveEntryTries(0);
}
}
if(LOG.isTraceEnabled()) {
logTrace(String.format("Constructed new QueueState - %s", queueState));
}
return queueState;
}
@Override
public void saveDequeueState(QueueConsumer consumer, QueueConfig config, QueueStateImpl queueState,
ReadPointer readPointer) throws OperationException {
// Persist the queue state of this consumer: read pointer plus the active
// (dequeued-but-unacked) entry and its crash-retry count.
writeQueueStateStore.setRowKey(makeRowKey(CONSUMER_META_PREFIX, consumer.getGroupId(), consumer.getInstanceId()));
writeQueueStateStore.addColumnName(CONSUMER_READ_POINTER);
writeQueueStateStore.addColumnValue(Bytes.toBytes(queueState.getConsumerReadPointer()));
writeQueueStateStore.addColumnName(ACTIVE_ENTRY);
writeQueueStateStore.addColumnValue(Bytes.toBytes(queueState.getActiveEntryId()));
writeQueueStateStore.addColumnName(ACTIVE_ENTRY_CRASH_TRIES);
writeQueueStateStore.addColumnValue(Bytes.toBytes(queueState.getActiveEntryTries()));
writeQueueStateStore.write(readPointer);
}
}
/**
 * Dequeue strategy for hash partitioning: each entry carries a hash value in a
 * header column keyed by the consumer's partitioning key, and an entry is
 * emitted to this consumer only when the partitioner accepts that hash.
 */
class HashDequeueStrategy extends AbstractDequeueStrategy implements DequeueStrategy {
  @Override
  public List<Long> fetchNextEntries(
    QueueConsumer consumer, QueueConfig config, QueueStateImpl queueState, ReadPointer readPointer) throws OperationException {
    long entryId = queueState.getConsumerReadPointer();
    QueuePartitioner partitioner = config.getPartitionerType().getPartitioner();
    List<Long> newEntryIds = new ArrayList<Long>();
    outerLoop:
    while (newEntryIds.isEmpty()) {
      if(entryId >= queueState.getQueueWritePointer()) {
        // Reached the end of queue as per cached QueueWritePointer,
        // read it again to see if there is any progress made by producers
        // TODO: use raw Get instead of the workaround of incrementing zero
        long queueWritePointer = table.incrementAtomicDirtily(makeRowName(GLOBAL_ENTRY_ID_PREFIX), GLOBAL_ENTRYID_COUNTER, 0);
        queueState.setQueueWritePointer(queueWritePointer);
        if(LOG.isTraceEnabled()) {
          logTrace(String.format("New queueWritePointer = %d", queueWritePointer));
        }
        // If still no progress, return empty queue.
        // (Typed emptyList() instead of the raw Collections.EMPTY_LIST.)
        if(entryId >= queueState.getQueueWritePointer()) {
          return Collections.emptyList();
        }
      }
      final long batchSize = getBatchSize(config);
      long startEntryId = entryId + 1;
      // Scan at most batchSize * groupSize entries, clamped to the write pointer.
      long endEntryId =
        startEntryId + (batchSize * consumer.getGroupSize()) < queueState.getQueueWritePointer() ?
        startEntryId + (batchSize * consumer.getGroupSize()) : queueState.getQueueWritePointer();
      // Read header data from underlying storage, if any
      final int cacheSize = (int)(endEntryId - startEntryId + 1);
      final String partitioningKey = consumer.getPartitioningKey();
      if(partitioningKey == null || partitioningKey.isEmpty()) {
        throw new OperationException(StatusCode.INTERNAL_ERROR,
          String.format("Queue-%s: Using Hash Partitioning with null/empty partitioningKey.", Bytes.toString(queueName)));
      }
      final byte [][] rowKeys = new byte[cacheSize][];
      for(int id = 0; id < cacheSize; ++id) {
        rowKeys[id] = makeRowKey(GLOBAL_DATA_PREFIX, startEntryId + id);
      }
      final byte[][] columnKeys = new byte[1][];
      columnKeys[0] = makeColumnName(ENTRY_HEADER, partitioningKey);
      OperationResult<Map<byte[], Map<byte[], byte[]>>> headerResult = table.get(rowKeys, columnKeys, readPointer);
      // Determine which entries belong to this consumer based on the hash header.
      // A missing row or header means the entry isn't fully written yet; stop scanning.
      for(int id = 0; id < cacheSize; ++id) {
        final long currentEntryId = startEntryId + id;
        if (!headerResult.isEmpty()) {
          Map<byte[], Map<byte[], byte[]>> headerValue = headerResult.getValue();
          Map<byte[], byte[]> headerMap = headerValue.get(rowKeys[id]);
          if(headerMap == null) {
            break outerLoop;
          }
          byte[] hashBytes = headerMap.get(columnKeys[0]);
          if(hashBytes == null) {
            break outerLoop;
          }
          int hashValue = Bytes.toInt(hashBytes);
          if(partitioner.shouldEmit(consumer, currentEntryId, hashValue)) {
            newEntryIds.add(currentEntryId);
          }
        } else {
          // Not able to read header
          break outerLoop;
        }
      }
      entryId = endEntryId;
    }
    return newEntryIds;
  }
}
/**
 * Dequeue strategy for round-robin partitioning: entries are distributed
 * across consumers purely by entry id (no header read is needed), with the
 * partitioner deciding which ids this consumer instance should emit.
 */
class RoundRobinDequeueStrategy extends AbstractDequeueStrategy implements DequeueStrategy {
  @Override
  public List<Long> fetchNextEntries(QueueConsumer consumer, QueueConfig config, QueueStateImpl queueState, ReadPointer readPointer) throws OperationException {
    long entryId = queueState.getConsumerReadPointer();
    QueuePartitioner partitioner = config.getPartitionerType().getPartitioner();
    List<Long> newEntryIds = new ArrayList<Long>();
    while (newEntryIds.isEmpty()) {
      if(entryId >= queueState.getQueueWritePointer()) {
        // Reached the end of queue as per cached QueueWritePointer,
        // read it again to see if there is any progress made by producers
        // TODO: use raw Get instead of the workaround of incrementing zero
        long queueWritePointer = table.incrementAtomicDirtily(makeRowName(GLOBAL_ENTRY_ID_PREFIX), GLOBAL_ENTRYID_COUNTER, 0);
        queueState.setQueueWritePointer(queueWritePointer);
        // If still no progress, return empty queue.
        // (Typed emptyList() instead of the raw Collections.EMPTY_LIST.)
        if(entryId >= queueState.getQueueWritePointer()) {
          return Collections.emptyList();
        }
      }
      final long batchSize = getBatchSize(config);
      long startEntryId = entryId + 1;
      // Scan at most batchSize * groupSize entries, clamped to the write pointer.
      long endEntryId =
        startEntryId + (batchSize * consumer.getGroupSize()) < queueState.getQueueWritePointer() ?
        startEntryId + (batchSize * consumer.getGroupSize()) : queueState.getQueueWritePointer();
      final int cacheSize = (int)(endEntryId - startEntryId + 1);
      // Determine which entries this consumer instance should emit.
      for(int id = 0; id < cacheSize; ++id) {
        final long currentEntryId = startEntryId + id;
        if(partitioner.shouldEmit(consumer, currentEntryId)) {
          newEntryIds.add(currentEntryId);
        }
      }
      entryId = endEntryId;
    }
    return newEntryIds;
  }
}
/**
 * Dequeue strategy for FIFO queues: consumers in a group claim contiguous
 * ranges of entry ids by atomically advancing a shared group read pointer, and
 * the claimed range [CLAIMED_ENTRY_BEGIN, CLAIMED_ENTRY_END] is persisted with
 * the rest of the consumer state so it survives a crash.
 */
class FifoDequeueStrategy extends AbstractDequeueStrategy implements DequeueStrategy {
  @Override
  public QueueStateImpl constructQueueState(QueueConsumer consumer, QueueConfig config, ReadPointer readPointer) throws OperationException {
    // Ask the base class read to also fetch the claimed-entry range columns.
    readQueueStateStore.addColumnName(CLAIMED_ENTRY_BEGIN);
    readQueueStateStore.addColumnName(CLAIMED_ENTRY_END);
    // Read claimed entry Ids
    QueueStateImpl queueState = super.constructQueueState(consumer, config, readPointer);
    OperationResult<Map<byte[], byte[]>> stateBytes = readQueueStateStore.getReadResult();
    if(!stateBytes.isEmpty()) {
      long claimedEntryIdBegin = Bytes.toLong(stateBytes.getValue().get(CLAIMED_ENTRY_BEGIN));
      long claimedEntryIdEnd = Bytes.toLong(stateBytes.getValue().get(CLAIMED_ENTRY_END));
      // Only restore the range when both ends are valid.
      if(claimedEntryIdBegin != INVALID_ENTRY_ID && claimedEntryIdEnd != INVALID_ENTRY_ID) {
        queueState.setClaimedEntryBegin(claimedEntryIdBegin);
        queueState.setClaimedEntryEnd(claimedEntryIdEnd);
      }
    }
    return queueState;
  }
  @Override
  public void saveDequeueState(QueueConsumer consumer, QueueConfig config, QueueStateImpl queueState, ReadPointer readPointer) throws OperationException {
    // If a claimed entry is now being dequeued then increment CLAIMED_ENTRY_BEGIN
    if(queueState.getActiveEntryId() == queueState.getClaimedEntryBegin()) {
      // If reached end of claimed entries, then reset the claimed ids
      if(queueState.getClaimedEntryBegin() == queueState.getClaimedEntryEnd()) {
        queueState.setClaimedEntryBegin(INVALID_ENTRY_ID);
        queueState.setClaimedEntryEnd(INVALID_ENTRY_ID);
      } else {
        queueState.setClaimedEntryBegin(queueState.getClaimedEntryBegin() + 1);
      }
    }
    writeQueueStateStore.addColumnName(CLAIMED_ENTRY_BEGIN);
    writeQueueStateStore.addColumnValue(Bytes.toBytes(queueState.getClaimedEntryBegin()));
    writeQueueStateStore.addColumnName(CLAIMED_ENTRY_END);
    writeQueueStateStore.addColumnValue(Bytes.toBytes(queueState.getClaimedEntryEnd()));
    super.saveDequeueState(consumer, config, queueState, readPointer);
  }
  @Override
  public List<Long> fetchNextEntries(QueueConsumer consumer, QueueConfig config, QueueStateImpl queueState, ReadPointer readPointer) throws OperationException {
    List<Long> newEntryIds = new ArrayList<Long>();
    // If claimed entries exist, return them
    long claimedEntryIdBegin = queueState.getClaimedEntryBegin();
    long claimedEntryIdEnd = queueState.getClaimedEntryEnd();
    if(claimedEntryIdBegin != INVALID_ENTRY_ID && claimedEntryIdEnd != INVALID_ENTRY_ID &&
      claimedEntryIdEnd >= claimedEntryIdBegin) {
      for(long i = claimedEntryIdBegin; i <= claimedEntryIdEnd; ++i) {
        newEntryIds.add(i);
      }
      return newEntryIds;
    }
    final long batchSize = getBatchSize(config);
    // Else claim new queue entries to process
    QueuePartitioner partitioner = config.getPartitionerType().getPartitioner();
    while (newEntryIds.isEmpty()) {
      // TODO: use raw Get instead of the workaround of incrementing zero
      // TODO: move counters into oracle
      // (Renamed from the misspelled "groupReadPointetr".)
      long groupReadPointer = table.incrementAtomicDirtily(makeRowKey(GROUP_READ_POINTER, consumer.getGroupId()), GROUP_READ_POINTER, 0);
      if(groupReadPointer + batchSize >= queueState.getQueueWritePointer()) {
        // Reached the end of queue as per cached QueueWritePointer,
        // read it again to see if there is any progress made by producers
        // TODO: use raw Get instead of the workaround of incrementing zero
        // TODO: move counters into oracle
        long queueWritePointer = table.incrementAtomicDirtily(makeRowName(GLOBAL_ENTRY_ID_PREFIX), GLOBAL_ENTRYID_COUNTER, 0);
        queueState.setQueueWritePointer(queueWritePointer);
      }
      // End of queue reached.
      // (Typed emptyList() instead of the raw Collections.EMPTY_LIST.)
      if(groupReadPointer >= queueState.getQueueWritePointer()) {
        return Collections.emptyList();
      }
      // If there are enough entries for all consumers to claim, then claim batchSize entries
      // Otherwise divide the entries equally among all consumers
      long curBatchSize = groupReadPointer + (batchSize * consumer.getGroupSize()) < queueState.getQueueWritePointer() ?
        batchSize : (queueState.getQueueWritePointer() - groupReadPointer) / consumer.getGroupSize();
      // Make sure there is progress
      if(curBatchSize < 1) {
        curBatchSize = 1;
      }
      // Atomically claim the next curBatchSize entries for this consumer.
      long endEntryId = table.incrementAtomicDirtily(makeRowKey(GROUP_READ_POINTER, consumer.getGroupId()),
        GROUP_READ_POINTER, curBatchSize);
      long startEntryId = endEntryId - curBatchSize + 1;
      // Note: incrementing GROUP_READ_POINTER, and storing the claimed entryIds in HBase ideally need to happen atomically.
      // HBase doesn't support atomic increment and put.
      // Also, for performance reasons we have moved the write to method saveDequeueEntryState where all writes for a dequeue happen
      queueState.setClaimedEntryBegin(startEntryId);
      queueState.setClaimedEntryEnd(endEntryId);
      final int cacheSize = (int)(endEntryId - startEntryId + 1);
      // Determine which entries need to be read from storage based on partition type
      for(int id = 0; id < cacheSize; ++id) {
        final long currentEntryId = startEntryId + id;
        if(partitioner.shouldEmit(consumer, currentEntryId)) {
          newEntryIds.add(currentEntryId);
        }
      }
    }
    return newEntryIds;
  }
}
}
|
package com.elastic.support.diagnostics.commands;
import com.elastic.support.SystemProperties;
import com.elastic.support.diagnostics.DiagnosticContext;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.CompressorOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;
import org.apache.commons.compress.utils.IOUtils;
import java.io.*;
import java.util.zip.GZIPOutputStream;
/**
 * Diagnostic command that packages the run's temp directory into a
 * gzip-compressed tar archive next to the directory itself.
 */
public class ArchiveResultsCmd extends AbstractDiagnosticCmd {

  /**
   * Creates {@code <tempDir>-<timestamp>.tar.gz} containing the whole temp
   * directory tree. Always returns true; failures are logged, not rethrown.
   *
   * @param context supplies the temp directory to archive
   * @return true unconditionally (archiving is best-effort)
   */
  public boolean execute(DiagnosticContext context) {
    logger.info("Archiving diagnostic results.");
    try {
      String dir = context.getTempDir();
      File srcDir = new File(dir);
      // Fix: the stream below compresses with gzip, so the file must be named
      // ".tar.gz" — the previous ".tar.bz2" extension mislabeled the format.
      String filename = dir + "-" + SystemProperties.getFileDateString() + ".tar.gz";
      // try-with-resources closes (and finalizes) all three streams even when
      // archiving fails part-way; the original leaked them on exception.
      try (FileOutputStream fout = new FileOutputStream(filename);
           CompressorOutputStream cout = new GzipCompressorOutputStream(fout);
           TarArchiveOutputStream taos = new TarArchiveOutputStream(cout)) {
        taos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_STAR);
        taos.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
        archiveResults(taos, srcDir, "", true);
      }
      logger.info("Archive: " + filename + " was created");
    } catch (Exception ioe) {
      logger.error("Couldn't create archive.\n", ioe);
    }
    return true;
  }

  /**
   * Recursively adds {@code file} (and, for directories, its children) to the
   * tar stream under {@code path}.
   *
   * @param taos   open tar output stream to append to
   * @param file   file or directory to archive
   * @param path   relative path prefix inside the archive
   * @param append when true, the entry name gets a timestamp suffix
   *               (used only for the archive's root directory)
   */
  public void archiveResults(TarArchiveOutputStream taos, File file, String path, boolean append) {
    String relPath;
    try {
      if (append) {
        relPath = path + "/" + file.getName() + "-" + SystemProperties.getFileDateString();
      } else {
        relPath = path + "/" + file.getName();
      }
      TarArchiveEntry tae = new TarArchiveEntry(file, relPath);
      taos.putArchiveEntry(tae);
      if (file.isFile()) {
        // Close the input stream even if the copy throws.
        try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(file))) {
          IOUtils.copy(bis, taos);
        }
        taos.closeArchiveEntry();
      } else if (file.isDirectory()) {
        taos.closeArchiveEntry();
        File[] children = file.listFiles();
        // listFiles() returns null on I/O error or permission problems.
        if (children != null) {
          for (File childFile : children) {
            archiveResults(taos, childFile, relPath, false);
          }
        }
      }
    } catch (IOException e) {
      logger.error("Archive Error", e);
    }
  }
}
|
package com.fangxuele.tool.wechat.push.ui.listener;
import com.fangxuele.tool.wechat.push.ui.Init;
import com.fangxuele.tool.wechat.push.ui.MainWindow;
import com.fangxuele.tool.wechat.push.util.DbUtilMySQL;
import com.fangxuele.tool.wechat.push.util.SystemUtil;
import com.xiaoleilu.hutool.log.Log;
import com.xiaoleilu.hutool.log.LogFactory;
import javax.swing.*;
import javax.swing.table.DefaultTableModel;
import java.io.File;
import java.sql.Connection;
import java.util.Map;
/**
 * Registers all action listeners for the settings tab and the message-history
 * table: config persistence, MySQL connection testing, appearance changes, and
 * bulk select/unselect/delete of message history rows.
 */
public class SettingListener {
    private static final Log logger = LogFactory.get();

    /**
     * Wires every settings-related button/checkbox to its handler.
     * Must be called once after the main window is built.
     */
    public static void addListeners() {
        // Persist the auto-check-update toggle immediately on change.
        MainWindow.mainWindow.getAutoCheckUpdateCheckBox().addActionListener(e -> {
            Init.configer.setAutoCheckUpdate(MainWindow.mainWindow.getAutoCheckUpdateCheckBox().isSelected());
            Init.configer.save();
        });
        // Save the WeChat MP credentials from the form into the config file.
        MainWindow.mainWindow.getSettingMpInfoSaveButton().addActionListener(e -> {
            try {
                Init.configer.setWechatAppId(MainWindow.mainWindow.getWechatAppIdTextField().getText());
                Init.configer.setWechatAppSecret(new String(MainWindow.mainWindow.getWechatAppSecretPasswordField().getPassword()));
                Init.configer.setWechatToken(new String(MainWindow.mainWindow.getWechatTokenPasswordField().getPassword()));
                Init.configer.setWechatAesKey(new String(MainWindow.mainWindow.getWechatAesKeyPasswordField().getPassword()));
                Init.configer.save();
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "", "",
                        JOptionPane.INFORMATION_MESSAGE);
            } catch (Exception e1) {
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "\n\n" + e1.getMessage(), "",
                        JOptionPane.ERROR_MESSAGE);
                logger.error(e1);
            }
        });
        // Save the Alibaba (SMS) service settings.
        MainWindow.mainWindow.getSettingAliInfoSaveButton().addActionListener(e -> {
            try {
                Init.configer.setAliServerUrl(MainWindow.mainWindow.getAliServerUrlTextField().getText());
                Init.configer.setAliAppKey(new String(MainWindow.mainWindow.getAliAppKeyPasswordField().getPassword()));
                Init.configer.setAliAppSecret(new String(MainWindow.mainWindow.getAliAppSecretPasswordField().getPassword()));
                Init.configer.setAliSign(MainWindow.mainWindow.getAliSignTextField().getText());
                Init.configer.save();
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "", "",
                        JOptionPane.INFORMATION_MESSAGE);
            } catch (Exception e1) {
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "\n\n" + e1.getMessage(), "",
                        JOptionPane.ERROR_MESSAGE);
                logger.error(e1);
            }
        });
        // mysql- : test the connection with the values currently in the form.
        MainWindow.mainWindow.getSettingTestDbLinkButton().addActionListener(e -> {
            try {
                DbUtilMySQL dbMySQL = DbUtilMySQL.getInstance();
                String DBUrl = MainWindow.mainWindow.getMysqlUrlTextField().getText();
                String DBName = MainWindow.mainWindow.getMysqlDatabaseTextField().getText();
                String DBUser = MainWindow.mainWindow.getMysqlUserTextField().getText();
                String DBPassword = new String(MainWindow.mainWindow.getMysqlPasswordField().getPassword());
                Connection conn = dbMySQL.testConnection(DBUrl, DBName, DBUser, DBPassword);
                // null connection signals failure; non-null signals success.
                if (conn == null) {
                    JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "", "",
                            JOptionPane.ERROR_MESSAGE);
                } else {
                    JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "", "",
                            JOptionPane.INFORMATION_MESSAGE);
                }
            } catch (Exception e1) {
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "\n\n" + e1.getMessage(), "",
                        JOptionPane.ERROR_MESSAGE);
                logger.error(e1);
            }
        });
        // mysql- : persist the MySQL connection settings.
        MainWindow.mainWindow.getSettingDbInfoSaveButton().addActionListener(e -> {
            try {
                Init.configer.setMysqlUrl(MainWindow.mainWindow.getMysqlUrlTextField().getText());
                Init.configer.setMysqlDatabase(MainWindow.mainWindow.getMysqlDatabaseTextField().getText());
                Init.configer.setMysqlUser(MainWindow.mainWindow.getMysqlUserTextField().getText());
                Init.configer.setMysqlPassword(new String(MainWindow.mainWindow.getMysqlPasswordField().getPassword()));
                Init.configer.save();
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "", "",
                        JOptionPane.INFORMATION_MESSAGE);
            } catch (Exception e1) {
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "\n\n" + e1.getMessage(), "",
                        JOptionPane.ERROR_MESSAGE);
                logger.error(e1);
            }
        });
        // Apply and persist theme/font appearance settings, then refresh the UI.
        MainWindow.mainWindow.getSettingAppearanceSaveButton().addActionListener(e -> {
            try {
                Init.configer.setTheme(MainWindow.mainWindow.getSettingThemeComboBox().getSelectedItem().toString());
                Init.configer.setFont(MainWindow.mainWindow.getSettingFontNameComboBox().getSelectedItem().toString());
                Init.configer.setFontSize(Integer.parseInt(MainWindow.mainWindow.getSettingFontSizeComboBox().getSelectedItem().toString()));
                Init.configer.save();
                Init.initTheme();
                Init.initGlobalFont();
                SwingUtilities.updateComponentTreeUI(MainWindow.frame);
                SwingUtilities.updateComponentTreeUI(MainWindow.mainWindow.getTabbedPane());
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "\n\n\n\n", "",
                        JOptionPane.INFORMATION_MESSAGE);
            } catch (Exception e1) {
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "\n\n" + e1.getMessage(), "",
                        JOptionPane.ERROR_MESSAGE);
                logger.error(e1);
            }
        });
        // Check the first (checkbox) column of every history row.
        // NOTE(review): these handlers mutate Swing models from a worker
        // thread; Swing requires the EDT (SwingUtilities.invokeLater) — confirm.
        MainWindow.mainWindow.getMsgHisTableSelectAllButton().addActionListener(e -> new Thread(() -> {
            DefaultTableModel tableModel = (DefaultTableModel) MainWindow.mainWindow.getMsgHistable()
                    .getModel();
            int rowCount = tableModel.getRowCount();
            for (int i = 0; i < rowCount; i++) {
                tableModel.setValueAt(true, i, 0);
            }
        }).start());
        // Uncheck the first column of every history row.
        MainWindow.mainWindow.getMsgHisTableUnselectAllButton().addActionListener(e -> new Thread(() -> {
            DefaultTableModel tableModel = (DefaultTableModel) MainWindow.mainWindow.getMsgHistable()
                    .getModel();
            int rowCount = tableModel.getRowCount();
            for (int i = 0; i < rowCount; i++) {
                tableModel.setValueAt(false, i, 0);
            }
        }).start());
        // Delete every checked history row (after confirmation), along with its
        // persisted history entry and template-data CSV file.
        MainWindow.mainWindow.getMsgHisTableDeleteButton().addActionListener(e -> new Thread(() -> {
            try {
                DefaultTableModel tableModel = (DefaultTableModel) MainWindow.mainWindow.getMsgHistable()
                        .getModel();
                int rowCount = tableModel.getRowCount();
                int selectedCount = 0;
                for (int i = 0; i < rowCount; i++) {
                    boolean isSelected = (boolean) tableModel.getValueAt(i, 0);
                    if (isSelected) {
                        selectedCount++;
                    }
                }
                if (selectedCount == 0) {
                    JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "", "",
                            JOptionPane.INFORMATION_MESSAGE);
                } else {
                    int isDelete = JOptionPane.showConfirmDialog(MainWindow.mainWindow.getSettingPanel(), "", "",
                            JOptionPane.INFORMATION_MESSAGE);
                    if (isDelete == JOptionPane.YES_OPTION) {
                        Map<String, String[]> msgMap = Init.msgHisManager.readMsgHis();
                        // Single pass: after removeRow(i) the next row shifts into
                        // index i, so only advance when nothing was removed.
                        // (The original restarted from index 0 after every removal,
                        // making deletion O(n^2); same rows are removed either way.)
                        int i = 0;
                        while (i < tableModel.getRowCount()) {
                            boolean delete = (boolean) tableModel.getValueAt(i, 0);
                            if (delete) {
                                String msgName = (String) tableModel.getValueAt(i, 1);
                                if (msgMap.containsKey(msgName)) {
                                    msgMap.remove(msgName);
                                    File msgTemplateDataFile = new File(SystemUtil.configHome + "data"
                                            + File.separator + "template_data" + File.separator + msgName + ".csv");
                                    if (msgTemplateDataFile.exists()) {
                                        msgTemplateDataFile.delete();
                                    }
                                }
                                tableModel.removeRow(i);
                            } else {
                                i++;
                            }
                        }
                        // One repaint after all removals instead of one per row.
                        MainWindow.mainWindow.getMsgHistable().updateUI();
                        Init.msgHisManager.writeMsgHis(msgMap);
                        Init.initMsgTab(null);
                    }
                }
            } catch (Exception e1) {
                JOptionPane.showMessageDialog(MainWindow.mainWindow.getSettingPanel(), "\n\n" + e1.getMessage(), "",
                        JOptionPane.ERROR_MESSAGE);
                logger.error(e1);
            }
        }).start());
    }
}
|
package com.fasterxml.jackson.databind.deser.impl;
import java.io.IOException;
import java.lang.reflect.Member;
import java.util.*;
import com.fasterxml.jackson.databind.*;
import com.fasterxml.jackson.databind.cfg.MapperConfig;
import com.fasterxml.jackson.databind.deser.CreatorProperty;
import com.fasterxml.jackson.databind.deser.SettableBeanProperty;
import com.fasterxml.jackson.databind.deser.ValueInstantiator;
import com.fasterxml.jackson.databind.deser.std.StdValueInstantiator;
import com.fasterxml.jackson.databind.introspect.*;
import com.fasterxml.jackson.databind.util.ClassUtil;
/**
* Container class for storing information on creators (based on annotations,
* visibility), to be able to build actual instantiator later on.
*/
/**
 * Container class for storing information on creators (based on annotations,
 * visibility), to be able to build actual instantiator later on.
 */
public class CreatorCollector
{
    // Since 2.5
    protected final static int C_DEFAULT = 0;
    protected final static int C_STRING = 1;
    protected final static int C_INT = 2;
    protected final static int C_LONG = 3;
    protected final static int C_DOUBLE = 4;
    protected final static int C_BOOLEAN = 5;
    protected final static int C_DELEGATE = 6;
    protected final static int C_PROPS = 7;
    protected final static int C_ARRAY_DELEGATE = 8;

    // Fix: this table must have an entry for every C_* index. It previously
    // stopped at C_PROPS (7), so a conflict on an array-delegate creator
    // (index 8) threw ArrayIndexOutOfBoundsException from verifyNonDup()
    // instead of the intended IllegalArgumentException message.
    protected final static String[] TYPE_DESCS = new String[] {
        "default",
        "String", "int", "long", "double", "boolean",
        "delegate", "property-based", "array-delegate"
    };

    /// Type of bean being created
    final protected BeanDescription _beanDesc;

    final protected boolean _canFixAccess;

    /**
     * @since 2.7
     */
    final protected boolean _forceAccess;

    /**
     * Set of creators we have collected so far
     *
     * @since 2.5
     */
    protected final AnnotatedWithParams[] _creators = new AnnotatedWithParams[9];

    /**
     * Bitmask of creators that were explicitly marked as creators; false for auto-detected
     * (ones included base on naming and/or visibility, not annotation)
     *
     * @since 2.5
     */
    protected int _explicitCreators = 0;

    protected boolean _hasNonDefaultCreator = false;

    // when there are injectable values along with delegate:
    protected SettableBeanProperty[] _delegateArgs;
    protected SettableBeanProperty[] _arrayDelegateArgs;
    protected SettableBeanProperty[] _propertyBasedArgs;

    protected AnnotatedParameter _incompleteParameter;

    public CreatorCollector(BeanDescription beanDesc, MapperConfig<?> config)
    {
        _beanDesc = beanDesc;
        _canFixAccess = config.canOverrideAccessModifiers();
        _forceAccess = config.isEnabled(MapperFeature.OVERRIDE_PUBLIC_ACCESS_MODIFIERS);
    }

    /**
     * Builds the actual {@link ValueInstantiator} from everything collected,
     * using fast non-reflective instantiators for well-known container types
     * when no custom creators were found.
     */
    public ValueInstantiator constructValueInstantiator(DeserializationConfig config)
    {
        final JavaType delegateType = _computeDelegateType(_creators[C_DELEGATE], _delegateArgs);
        final JavaType arrayDelegateType = _computeDelegateType(_creators[C_ARRAY_DELEGATE], _arrayDelegateArgs);
        final JavaType type = _beanDesc.getType();

        // Any non-standard creator will prevent; with one exception: int-valued constructor
        // that standard containers have can be ignored
        if (!_hasNonDefaultCreator) {
            /* 10-May-2014, tatu: If we have nothing special, and we are dealing with one
             * of "well-known" types, can create a non-reflection-based instantiator.
             */
            final Class<?> rawType = type.getRawClass();
            if (rawType == Collection.class || rawType == List.class || rawType == ArrayList.class) {
                return new Vanilla(Vanilla.TYPE_COLLECTION);
            }
            if (rawType == Map.class || rawType == LinkedHashMap.class) {
                return new Vanilla(Vanilla.TYPE_MAP);
            }
            if (rawType == HashMap.class) {
                return new Vanilla(Vanilla.TYPE_HASH_MAP);
            }
        }
        StdValueInstantiator inst = new StdValueInstantiator(config, type);
        inst.configureFromObjectSettings(_creators[C_DEFAULT],
                _creators[C_DELEGATE], delegateType, _delegateArgs,
                _creators[C_PROPS], _propertyBasedArgs);
        inst.configureFromArraySettings(_creators[C_ARRAY_DELEGATE], arrayDelegateType, _arrayDelegateArgs);
        inst.configureFromStringCreator(_creators[C_STRING]);
        inst.configureFromIntCreator(_creators[C_INT]);
        inst.configureFromLongCreator(_creators[C_LONG]);
        inst.configureFromDoubleCreator(_creators[C_DOUBLE]);
        inst.configureFromBooleanCreator(_creators[C_BOOLEAN]);
        inst.configureIncompleteParameter(_incompleteParameter);
        return inst;
    }

    /**
     * Method called to indicate the default creator: no-arguments
     * constructor or factory method that is called to instantiate
     * a value before populating it with data. Default creator is
     * only used if no other creators are indicated.
     *
     * @param creator Creator method; no-arguments constructor or static
     *   factory method.
     */
    public void setDefaultCreator(AnnotatedWithParams creator) {
        _creators[C_DEFAULT] = _fixAccess(creator);
    }

    public void addStringCreator(AnnotatedWithParams creator, boolean explicit) {
        verifyNonDup(creator, C_STRING, explicit);
    }
    public void addIntCreator(AnnotatedWithParams creator, boolean explicit) {
        verifyNonDup(creator, C_INT, explicit);
    }
    public void addLongCreator(AnnotatedWithParams creator, boolean explicit) {
        verifyNonDup(creator, C_LONG, explicit);
    }
    public void addDoubleCreator(AnnotatedWithParams creator, boolean explicit) {
        verifyNonDup(creator, C_DOUBLE, explicit);
    }
    public void addBooleanCreator(AnnotatedWithParams creator, boolean explicit) {
        verifyNonDup(creator, C_BOOLEAN, explicit);
    }

    /** Registers a delegating creator; collection-like delegates go into the array slot. */
    public void addDelegatingCreator(AnnotatedWithParams creator, boolean explicit,
            SettableBeanProperty[] injectables)
    {
        if (creator.getParameterType(0).isCollectionLikeType()) {
            verifyNonDup(creator, C_ARRAY_DELEGATE, explicit);
            _arrayDelegateArgs = injectables;
        } else {
            verifyNonDup(creator, C_DELEGATE, explicit);
            _delegateArgs = injectables;
        }
    }

    /** Registers a property-based creator, rejecting duplicate property names. */
    public void addPropertyCreator(AnnotatedWithParams creator, boolean explicit,
            SettableBeanProperty[] properties)
    {
        verifyNonDup(creator, C_PROPS, explicit);
        // [JACKSON-470] Better ensure we have no duplicate names either...
        if (properties.length > 1) {
            HashMap<String,Integer> names = new HashMap<String,Integer>();
            for (int i = 0, len = properties.length; i < len; ++i) {
                String name = properties[i].getName();
                /* [Issue-13]: Need to consider Injectables, which may not have
                 *   a name at all, and need to be skipped
                 */
                if (name.length() == 0 && properties[i].getInjectableValueId() != null) {
                    continue;
                }
                Integer old = names.put(name, Integer.valueOf(i));
                if (old != null) {
                    throw new IllegalArgumentException("Duplicate creator property \""+name+"\" (index "+old+" vs "+i+")");
                }
            }
        }
        _propertyBasedArgs = properties;
    }

    // Method name keeps its historical typo ("Incompete") because it is public API.
    public void addIncompeteParameter(AnnotatedParameter parameter) {
        if (_incompleteParameter == null) {
            _incompleteParameter = parameter;
        }
    }

    // Bunch of methods deprecated in 2.5, to be removed from 2.6 or later

    @Deprecated // since 2.5
    public void addStringCreator(AnnotatedWithParams creator) {
        addStringCreator(creator, false);
    }
    // Fix: the three deprecated delegations below previously all called
    // addBooleanCreator(creator, false) (copy-paste bug), registering int/long/
    // double creators in the boolean slot. Each now delegates to its own type.
    @Deprecated // since 2.5
    public void addIntCreator(AnnotatedWithParams creator) {
        addIntCreator(creator, false);
    }
    @Deprecated // since 2.5
    public void addLongCreator(AnnotatedWithParams creator) {
        addLongCreator(creator, false);
    }
    @Deprecated // since 2.5
    public void addDoubleCreator(AnnotatedWithParams creator) {
        addDoubleCreator(creator, false);
    }
    @Deprecated // since 2.5
    public void addBooleanCreator(AnnotatedWithParams creator) {
        addBooleanCreator(creator, false);
    }

    @Deprecated // since 2.5
    public void addDelegatingCreator(AnnotatedWithParams creator, CreatorProperty[] injectables) {
        addDelegatingCreator(creator, false, injectables);
    }
    @Deprecated // since 2.5
    public void addPropertyCreator(AnnotatedWithParams creator, CreatorProperty[] properties) {
        addPropertyCreator(creator, false, properties);
    }

    /**
     * @since 2.1
     */
    public boolean hasDefaultCreator() {
        return _creators[C_DEFAULT] != null;
    }

    /**
     * @since 2.6
     */
    public boolean hasDelegatingCreator() {
        return _creators[C_DELEGATE] != null;
    }

    /**
     * @since 2.6
     */
    public boolean hasPropertyBasedCreator() {
        return _creators[C_PROPS] != null;
    }

    /**
     * Resolves the delegated-to type: the parameter that is NOT an injectable
     * (the null marker in delegateArgs), or parameter 0 when there are none.
     */
    private JavaType _computeDelegateType(AnnotatedWithParams creator, SettableBeanProperty[] delegateArgs)
    {
        if (!_hasNonDefaultCreator || (creator == null)) {
            return null;
        } else {
            // need to find type...
            int ix = 0;
            if (delegateArgs != null) {
                for (int i = 0, len = delegateArgs.length; i < len; ++i) {
                    if (delegateArgs[i] == null) { // marker for delegate itself
                        ix = i;
                        break;
                    }
                }
            }
            return creator.getParameterType(ix);
        }
    }

    /** Forces accessibility on the member when configuration allows it. */
    private <T extends AnnotatedMember> T _fixAccess(T member)
    {
        if (member != null && _canFixAccess) {
            ClassUtil.checkAndFixAccess((Member) member.getAnnotated(), _forceAccess);
        }
        return member;
    }

    /**
     * Records a creator in the given slot, resolving conflicts: explicit wins
     * over implicit, sub-class overrides are allowed, and a more specific
     * parameter type wins between two same-class candidates.
     *
     * @throws IllegalArgumentException on an unresolvable conflict
     */
    protected void verifyNonDup(AnnotatedWithParams newOne, int typeIndex, boolean explicit)
    {
        final int mask = (1 << typeIndex);
        _hasNonDefaultCreator = true;
        AnnotatedWithParams oldOne = _creators[typeIndex];
        // already had an explicitly marked one?
        if (oldOne != null) {
            boolean verify;

            if ((_explicitCreators & mask) != 0) { // already had explicitly annotated, leave as-is
                // but skip, if new one not annotated
                if (!explicit) {
                    return;
                }
                // both explicit: verify
                verify = true;
            } else {
                // otherwise only verify if neither explicitly annotated.
                verify = !explicit;
            }

            // one more thing: ok to override in sub-class
            if (verify && (oldOne.getClass() == newOne.getClass())) {
                // [databind#667]: avoid one particular class of bogus problems
                Class<?> oldType = oldOne.getRawParameterType(0);
                Class<?> newType = newOne.getRawParameterType(0);

                if (oldType == newType) {
                    throw new IllegalArgumentException("Conflicting "+TYPE_DESCS[typeIndex]
                            +" creators: already had explicitly marked "+oldOne+", encountered "+newOne);
                }
                // otherwise, which one to choose?
                if (newType.isAssignableFrom(oldType)) {
                    // new type more generic, use old
                    return;
                }
                // new type more specific, use it
            }
        }
        if (explicit) {
            _explicitCreators |= mask;
        }
        _creators[typeIndex] = _fixAccess(newOne);
    }

    /**
     * Minimal non-reflective instantiator for the well-known container types
     * (ArrayList / LinkedHashMap / HashMap) used when no custom creators exist.
     */
    protected final static class Vanilla
        extends ValueInstantiator
        implements java.io.Serializable
    {
        private static final long serialVersionUID = 1L;

        public final static int TYPE_COLLECTION = 1;
        public final static int TYPE_MAP = 2;
        public final static int TYPE_HASH_MAP = 3;

        private final int _type;

        public Vanilla(int t) {
            _type = t;
        }

        @Override
        public String getValueTypeDesc() {
            switch (_type) {
            case TYPE_COLLECTION: return ArrayList.class.getName();
            case TYPE_MAP: return LinkedHashMap.class.getName();
            case TYPE_HASH_MAP: return HashMap.class.getName();
            }
            return Object.class.getName();
        }

        @Override
        public boolean canInstantiate() { return true; }

        @Override
        public boolean canCreateUsingDefault() {  return true; }

        @Override
        public Object createUsingDefault(DeserializationContext ctxt) throws IOException {
            switch (_type) {
            case TYPE_COLLECTION: return new ArrayList<Object>();
            case TYPE_MAP: return new LinkedHashMap<String,Object>();
            case TYPE_HASH_MAP: return new HashMap<String,Object>();
            }
            throw new IllegalStateException("Unknown type "+_type);
        }
    }
}
|
package com.ferreusveritas.dynamictrees.models;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import org.lwjgl.util.vector.Vector3f;
import com.ferreusveritas.dynamictrees.blocks.BlockBranch;
import com.google.common.collect.Maps;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.renderer.block.model.BakedQuad;
import net.minecraft.client.renderer.block.model.BlockFaceUV;
import net.minecraft.client.renderer.block.model.BlockPart;
import net.minecraft.client.renderer.block.model.BlockPartFace;
import net.minecraft.client.renderer.block.model.FaceBakery;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.client.renderer.block.model.ItemOverrideList;
import net.minecraft.client.renderer.block.model.ModelRotation;
import net.minecraft.client.renderer.block.model.SimpleBakedModel;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumFacing.Axis;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.ResourceLocation;
/**
 * Baked model for thick tree trunks: pre-bakes one cuboid trunk model per
 * supported radius (9..24) and serves the matching one from
 * {@link #getQuads}; thinner branches fall back to the basic composite
 * model.
 */
public class CompositeThickModel extends CompositeBasicModel {

    // Pre-baked trunk models for radii 9..24; index = radius - 9.
    private IBakedModel[] trunks = new IBakedModel[16];

    public CompositeThickModel(ResourceLocation barkRes, ResourceLocation ringsRes, Function<ResourceLocation, TextureAtlasSprite> bakedTextureGetter) {
        super(barkRes, ringsRes, bakedTextureGetter);

        TextureAtlasSprite barkIcon = bakedTextureGetter.apply(barkRes);
        TextureAtlasSprite ringIcon = bakedTextureGetter.apply(ringsRes);
        barkParticles = barkIcon;

        // Bake one model per supported thick-trunk radius.
        for (int i = 0; i < trunks.length; i++) {
            int radius = i + 9;
            trunks[i] = bakeTrunk(radius, barkIcon, ringIcon);
        }
    }

    /**
     * Bakes a full-height cuboid trunk model of the given radius, using the
     * ring texture on Y faces and the bark texture on the sides.
     *
     * @param radius trunk radius in model units (expected 9..24)
     * @param bark   side-face texture
     * @param ring   top/bottom-face texture
     * @return the baked trunk model
     */
    public IBakedModel bakeTrunk(int radius, TextureAtlasSprite bark, TextureAtlasSprite ring) {
        // Cuboid centered on (8, *, 8), spanning the full block height.
        Vector3f posFrom = new Vector3f(8 - radius, 0, 8 - radius);
        Vector3f posTo = new Vector3f(8 + radius, 16, 8 + radius);

        Map<EnumFacing, BlockPartFace> mapFacesIn = Maps.newEnumMap(EnumFacing.class);
        for (EnumFacing face : EnumFacing.VALUES) {
            // Full-face UVs; rotation determined relative to the Y axis.
            BlockFaceUV uvface = new BlockFaceUV(new float[] { 0, 0, 16, 16 }, getFaceAngle(Axis.Y, face));
            mapFacesIn.put(face, new BlockPartFace(null, -1, null, uvface));
        }

        BlockPart part = new BlockPart(posFrom, posTo, mapFacesIn, null, true);
        SimpleBakedModel.Builder builder = new SimpleBakedModel.Builder(modelBlock, ItemOverrideList.NONE).setTexture(ring);
        for (Map.Entry<EnumFacing, BlockPartFace> e : part.mapFaces.entrySet()) {
            EnumFacing face = e.getKey();
            builder.addFaceQuad(face, makeBakedQuad(part, e.getValue(), face.getAxis() == Axis.Y ? ring : bark, face, ModelRotation.X0_Y0, false));
        }
        return builder.makeBakedModel();
    }

    /**
     * Bakes a single quad for one face of a block part.
     */
    protected BakedQuad makeBakedQuad(BlockPart blockPart, BlockPartFace partFace, TextureAtlasSprite atlasSprite, EnumFacing dir, net.minecraftforge.common.model.ITransformation transform, boolean uvlocked) {
        return new FaceBakery().makeBakedQuad(blockPart.positionFrom, blockPart.positionTo, partFace, atlasSprite, dir, transform, blockPart.partRotation, uvlocked, blockPart.shade);
    }

    @Override
    public List<BakedQuad> getQuads(IBlockState blockState, EnumFacing side, long rand) {
        int coreRadius = getRadius(blockState);
        if (coreRadius <= BlockBranch.RADMAX_NORMAL) {
            // Thin branches are handled by the basic composite model.
            return super.getQuads(blockState, side, rand);
        }
        // Clamp to the baked range and return a mutable copy of the
        // pre-baked trunk's quads (index = radius - 9).
        coreRadius = MathHelper.clamp(coreRadius, 9, 24);
        return new LinkedList<BakedQuad>(trunks[coreRadius - 9].getQuads(blockState, side, rand));
    }
}
|
package org.batfish.main;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.apache.commons.exec.CommandLine;
import org.apache.commons.exec.DefaultExecutor;
import org.apache.commons.exec.ExecuteException;
import org.apache.commons.exec.PumpStreamHandler;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.exception.ExceptionUtils;
import org.batfish.common.BatfishLogger;
import org.batfish.common.BfConsts;
import org.batfish.common.BfJson;
import org.batfish.common.BatfishException;
import org.batfish.common.CleanBatfishException;
import org.batfish.common.Pair;
import org.batfish.common.util.StringFilter;
import org.batfish.common.util.UrlZipExplorer;
import org.batfish.common.util.CommonUtil;
import org.batfish.datamodel.AsPath;
import org.batfish.datamodel.AsSet;
import org.batfish.datamodel.BgpAdvertisement;
import org.batfish.datamodel.Edge;
import org.batfish.datamodel.Flow;
import org.batfish.datamodel.FlowBuilder;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.LBValueType;
import org.batfish.datamodel.PrecomputedRoute;
import org.batfish.datamodel.Prefix;
import org.batfish.datamodel.SubRange;
import org.batfish.datamodel.answers.Answer;
import org.batfish.datamodel.answers.AnswerStatus;
import org.batfish.datamodel.answers.StringAnswer;
import org.batfish.datamodel.collections.AdvertisementSet;
import org.batfish.datamodel.collections.CommunitySet;
import org.batfish.datamodel.collections.EdgeSet;
import org.batfish.datamodel.collections.FibMap;
import org.batfish.datamodel.collections.FibRow;
import org.batfish.datamodel.collections.FibSet;
import org.batfish.datamodel.collections.FunctionSet;
import org.batfish.datamodel.collections.IbgpTopology;
import org.batfish.datamodel.collections.InterfaceSet;
import org.batfish.datamodel.collections.IpEdge;
import org.batfish.datamodel.collections.LBValueTypeList;
import org.batfish.datamodel.collections.MultiSet;
import org.batfish.datamodel.collections.NodeInterfacePair;
import org.batfish.datamodel.collections.NodeIpPair;
import org.batfish.datamodel.collections.NodeRoleMap;
import org.batfish.datamodel.collections.NodeSet;
import org.batfish.datamodel.collections.PolicyRouteFibIpMap;
import org.batfish.datamodel.collections.PolicyRouteFibNodeMap;
import org.batfish.datamodel.collections.PredicateSemantics;
import org.batfish.datamodel.collections.PredicateValueTypeMap;
import org.batfish.datamodel.collections.QualifiedNameMap;
import org.batfish.datamodel.collections.RoleSet;
import org.batfish.datamodel.collections.RouteSet;
import org.batfish.datamodel.collections.TreeMultiSet;
import org.batfish.datamodel.questions.AclReachabilityQuestion;
import org.batfish.datamodel.questions.CompareSameNameQuestion;
import org.batfish.datamodel.questions.DestinationQuestion;
import org.batfish.datamodel.questions.IngressPathQuestion;
import org.batfish.datamodel.questions.LocalPathQuestion;
import org.batfish.datamodel.questions.MultipathQuestion;
import org.batfish.datamodel.questions.NeighborsQuestion;
import org.batfish.datamodel.questions.NodesQuestion;
import org.batfish.datamodel.questions.ProtocolDependenciesQuestion;
import org.batfish.datamodel.questions.Question;
import org.batfish.datamodel.questions.QuestionParameters;
import org.batfish.datamodel.questions.ReachabilityQuestion;
import org.batfish.datamodel.questions.ReducedReachabilityQuestion;
import org.batfish.datamodel.questions.TracerouteQuestion;
import org.batfish.grammar.BatfishCombinedParser;
import org.batfish.grammar.ParseTreePrettyPrinter;
import org.batfish.grammar.juniper.JuniperCombinedParser;
import org.batfish.grammar.juniper.JuniperFlattener;
import org.batfish.grammar.logicblox.LogQLPredicateInfoExtractor;
import org.batfish.grammar.logicblox.LogiQLCombinedParser;
import org.batfish.grammar.logicblox.LogiQLPredicateInfoResolver;
import org.batfish.grammar.question.QuestionCombinedParser;
import org.batfish.grammar.question.QuestionExtractor;
import org.batfish.grammar.question.QuestionParametersCombinedParser;
import org.batfish.grammar.question.QuestionParametersExtractor;
import org.batfish.grammar.topology.BatfishTopologyCombinedParser;
import org.batfish.grammar.topology.BatfishTopologyExtractor;
import org.batfish.grammar.topology.GNS3TopologyCombinedParser;
import org.batfish.grammar.topology.GNS3TopologyExtractor;
import org.batfish.grammar.topology.RoleCombinedParser;
import org.batfish.grammar.topology.RoleExtractor;
import org.batfish.grammar.topology.TopologyExtractor;
import org.batfish.grammar.vyos.VyosCombinedParser;
import org.batfish.grammar.vyos.VyosFlattener;
import org.batfish.job.BatfishJobExecutor;
import org.batfish.job.ConvertConfigurationJob;
import org.batfish.job.ConvertConfigurationResult;
import org.batfish.job.FlattenVendorConfigurationJob;
import org.batfish.job.FlattenVendorConfigurationResult;
import org.batfish.job.ParseVendorConfigurationJob;
import org.batfish.job.ParseVendorConfigurationResult;
import org.batfish.logic.LogicResourceLocator;
import org.batfish.main.Settings.EnvironmentSettings;
import org.batfish.nxtnet.Block;
import org.batfish.nxtnet.Column;
import org.batfish.nxtnet.ConfigurationFactExtractor;
import org.batfish.nxtnet.EntityTable;
import org.batfish.nxtnet.Facts;
import org.batfish.nxtnet.NxtnetConstants;
import org.batfish.nxtnet.PredicateInfo;
import org.batfish.nxtnet.Relation;
import org.batfish.nxtnet.TopologyFactExtractor;
import org.batfish.protocoldependency.ProtocolDependencyAnalysis;
import org.batfish.question.Environment;
import org.batfish.question.VerifyProgram;
import org.batfish.question.VerifyQuestion;
import org.batfish.representation.BgpNeighbor;
import org.batfish.representation.BgpProcess;
import org.batfish.representation.Configuration;
import org.batfish.representation.DataPlane;
import org.batfish.representation.FlowHistory;
import org.batfish.representation.FlowTrace;
import org.batfish.representation.GenericConfigObject;
import org.batfish.representation.Interface;
import org.batfish.representation.IpAccessList;
import org.batfish.representation.IpAccessListLine;
import org.batfish.representation.IpsecVpn;
import org.batfish.representation.LineAction;
import org.batfish.representation.OspfArea;
import org.batfish.representation.OspfProcess;
import org.batfish.representation.PolicyMap;
import org.batfish.representation.PolicyMapAction;
import org.batfish.representation.PolicyMapClause;
import org.batfish.representation.PolicyMapMatchRouteFilterListLine;
import org.batfish.representation.RouteFilterLine;
import org.batfish.representation.RouteFilterList;
import org.batfish.representation.Topology;
import org.batfish.representation.VendorConfiguration;
import org.batfish.representation.aws_vpcs.AwsVpcConfiguration;
import org.batfish.util.Util;
import org.batfish.z3.AclLine;
import org.batfish.z3.AclReachabilityQuerySynthesizer;
import org.batfish.z3.BlacklistDstIpQuerySynthesizer;
import org.batfish.z3.CompositeNodJob;
import org.batfish.z3.MultipathInconsistencyQuerySynthesizer;
import org.batfish.z3.NodJob;
import org.batfish.z3.NodJobResult;
import org.batfish.z3.NodSatJob;
import org.batfish.z3.NodSatResult;
import org.batfish.z3.QuerySynthesizer;
import org.batfish.z3.ReachEdgeQuerySynthesizer;
import org.batfish.z3.ReachabilityQuerySynthesizer;
import org.batfish.z3.ReachableQuerySynthesizer;
import org.batfish.z3.Synthesizer;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.DomDriver;
/**
* This class encapsulates the main control logic for Batfish.
*/
public class Batfish implements AutoCloseable {
// private static final String BGP_ADVERTISEMENT_ROUTE_PREDICATE_NAME =
// "BgpAdvertisementRoute";

/**
 * Name of the LogiQL predicate for BGP advertisements
 */
private static final String BGP_ADVERTISEMENT_PREDICATE_NAME = "BgpAdvertisement";
/**
 * Name of the LogiQL data-plane predicate containing next hop information
 * for policy-routing
 */
private static final String FIB_POLICY_ROUTE_NEXT_HOP_PREDICATE_NAME = "FibForwardPolicyRouteNextHopIp";
/**
 * Name of the LogiQL data-plane predicate containing next hop information
 * for destination-based routing
 */
private static final String FIB_PREDICATE_NAME = "FibNetwork";
/**
 * Name of the LogiQL predicate containing flow path history
 */
private static final String FLOW_HISTORY_PREDICATE_NAME = "FlowPathHistory";
/**
 * Name of the LogiQL predicate containing flow-sink interface tags
 */
private static final String FLOW_SINK_PREDICATE_NAME = "SetFlowSinkInterface";
// Starting IP address used by OSPF generation (consumer not visible in
// this chunk)
private static final String GEN_OSPF_STARTING_IP = "10.0.0.0";
/**
 * Name of the LogiQL predicate containing iBGP neighbor pairs
 */
private static final String IBGP_NEIGHBORS_PREDICATE_NAME = "IbgpNeighbors";
/**
 * Name of the LogiQL predicate containing installed routes
 */
private static final String INSTALLED_ROUTE_PREDICATE_NAME = "InstalledRoute";
/**
 * A byte-array containing the first 4 bytes of the header for a file that is
 * the output of java serialization (stream magic 0xACED, version 0x0005)
 */
private static final byte[] JAVA_SERIALIZED_OBJECT_HEADER = {
      (byte) 0xac,
      (byte) 0xed,
      (byte) 0x00,
      (byte) 0x05 };
/**
 * The name of the LogiQL library for org.batfish
 */
private static final String LB_BATFISH_LIBRARY_NAME = "libbatfish";
/**
 * Name of the LogiQL input predicate for networks
 */
private static final String NETWORKS_PREDICATE_NAME = "SetNetwork";
// Name of the nxtnet executable invoked for datalog computation
private static final String NXTNET_COMMAND = "nxtnet";
// Names of the LogiQL input predicates used to inject precomputed facts
// (BGP advertisements, their attributes, iBGP neighbors, and routes)
private static final String PRECOMPUTED_BGP_ADVERTISEMENT_AS_PATH_LENGTH_PREDICATE_NAME = "SetBgpAdvertisementPathSize";
private static final String PRECOMPUTED_BGP_ADVERTISEMENT_AS_PATH_PREDICATE_NAME = "SetBgpAdvertisementPath";
private static final String PRECOMPUTED_BGP_ADVERTISEMENT_COMMUNITY_PREDICATE_NAME = "SetBgpAdvertisementCommunity";
private static final String PRECOMPUTED_BGP_ADVERTISEMENTS_PREDICATE_NAME = "SetBgpAdvertisement_flat";
private static final String PRECOMPUTED_IBGP_NEIGHBORS_PREDICATE_NAME = "SetIbgpNeighbors";
private static final String PRECOMPUTED_ROUTES_PREDICATE_NAME = "SetPrecomputedRoute_flat";
/**
 * The name of the file in which LogiQL predicate type-information and
 * documentation is serialized
 */
private static final String PREDICATE_INFO_FILENAME = "predicateInfo.object";
/**
 * A string containing the system-specific path separator character
 */
private static final String SEPARATOR = System.getProperty("file.separator");
/**
 * Role name for generated stubs
 */
private static final String STUB_ROLE = "generated_stubs";
/**
 * The name of the [optional] topology file within a test-rig
 */
private static final String TOPOLOGY_FILENAME = "topology.net";
/**
 * Derives every dependent path setting (serialized configs, test rig,
 * environment directories, question files, traffic-fact dump locations)
 * from the single auto base directory, when one is configured. No-op when
 * {@code settings.getAutoBaseDir()} returns null.
 *
 * @param settings global settings object whose path fields are populated
 *                 in place
 */
public static void applyAutoBaseDir(final Settings settings) {
   String baseDir = settings.getAutoBaseDir();
   if (baseDir != null) {
      EnvironmentSettings envSettings = settings
            .getBaseEnvironmentSettings();
      EnvironmentSettings diffEnvSettings = settings
            .getDiffEnvironmentSettings();
      // Top-level per-testrig paths.
      settings.setSerializeIndependentPath(Paths.get(baseDir,
            BfConsts.RELPATH_VENDOR_INDEPENDENT_CONFIG_DIR).toString());
      settings.setSerializeVendorPath(Paths.get(baseDir,
            BfConsts.RELPATH_VENDOR_SPECIFIC_CONFIG_DIR).toString());
      settings.setTestRigPath(Paths.get(baseDir,
            BfConsts.RELPATH_TEST_RIG_DIR).toString());
      settings.setProtocolDependencyGraphPath(Paths.get(baseDir,
            BfConsts.RELPATH_PROTOCOL_DEPENDENCY_GRAPH).toString());
      String envName = settings.getEnvironmentName();
      if (envName != null) {
         // Wire up all paths for the base environment.
         envSettings.setName(envName);
         Path envPath = Paths.get(baseDir,
               BfConsts.RELPATH_ENVIRONMENTS_DIR, envName);
         envSettings.setControlPlaneFactsDir(envPath.resolve(
               BfConsts.RELPATH_CONTROL_PLANE_FACTS_DIR).toString());
         envSettings.setNxtnetDataPlaneInputFile(envPath.resolve(
               BfConsts.RELPATH_NXTNET_INPUT_FILE).toString());
         envSettings.setNxtnetDataPlaneOutputDir(envPath.resolve(
               BfConsts.RELPATH_NXTNET_OUTPUT_DIR).toString());
         envSettings.setDataPlanePath(envPath.resolve(
               BfConsts.RELPATH_DATA_PLANE_DIR).toString());
         // Note: the Z3 data-plane file is set on the global settings, not
         // on the environment settings.
         settings.setZ3DataPlaneFile(envPath.resolve(
               BfConsts.RELPATH_Z3_DATA_PLANE_FILE).toString());
         Path envDirPath = envPath.resolve(BfConsts.RELPATH_ENV_DIR);
         envSettings.setEnvPath(envDirPath.toString());
         envSettings.setNodeBlacklistPath(envDirPath.resolve(
               BfConsts.RELPATH_NODE_BLACKLIST_FILE).toString());
         envSettings.setInterfaceBlacklistPath(envDirPath.resolve(
               BfConsts.RELPATH_INTERFACE_BLACKLIST_FILE).toString());
         envSettings.setEdgeBlacklistPath(envDirPath.resolve(
               BfConsts.RELPATH_EDGE_BLACKLIST_FILE).toString());
         envSettings.setSerializedTopologyPath(envDirPath.resolve(
               BfConsts.RELPATH_TOPOLOGY_FILE).toString());
         envSettings.setDeltaConfigurationsDir(envDirPath.resolve(
               BfConsts.RELPATH_CONFIGURATIONS_DIR).toString());
         envSettings.setExternalBgpAnnouncementsPath(envDirPath.resolve(
               BfConsts.RELPATH_EXTERNAL_BGP_ANNOUNCEMENTS).toString());
         envSettings.setPrecomputedRoutesPath(envPath.resolve(
               BfConsts.RELPATH_PRECOMPUTED_ROUTES).toString());
      }
      String diffEnvName = settings.getDiffEnvironmentName();
      if (diffEnvName != null) {
         // Mirror of the base-environment wiring above, for the delta
         // (differential) environment.
         diffEnvSettings.setName(diffEnvName);
         Path diffEnvPath = Paths.get(baseDir,
               BfConsts.RELPATH_ENVIRONMENTS_DIR, diffEnvName);
         diffEnvSettings.setControlPlaneFactsDir(diffEnvPath.resolve(
               BfConsts.RELPATH_CONTROL_PLANE_FACTS_DIR).toString());
         diffEnvSettings.setNxtnetDataPlaneInputFile(diffEnvPath.resolve(
               BfConsts.RELPATH_NXTNET_INPUT_FILE).toString());
         diffEnvSettings.setNxtnetDataPlaneOutputDir(diffEnvPath.resolve(
               BfConsts.RELPATH_NXTNET_OUTPUT_DIR).toString());
         diffEnvSettings.setDataPlanePath(diffEnvPath.resolve(
               BfConsts.RELPATH_DATA_PLANE_DIR).toString());
         Path diffEnvDirPath = diffEnvPath.resolve(BfConsts.RELPATH_ENV_DIR);
         diffEnvSettings.setEnvPath(diffEnvDirPath.toString());
         diffEnvSettings.setNodeBlacklistPath(diffEnvDirPath.resolve(
               BfConsts.RELPATH_NODE_BLACKLIST_FILE).toString());
         diffEnvSettings.setInterfaceBlacklistPath(diffEnvDirPath.resolve(
               BfConsts.RELPATH_INTERFACE_BLACKLIST_FILE).toString());
         diffEnvSettings.setEdgeBlacklistPath(diffEnvDirPath.resolve(
               BfConsts.RELPATH_EDGE_BLACKLIST_FILE).toString());
         diffEnvSettings.setSerializedTopologyPath(diffEnvDirPath.resolve(
               BfConsts.RELPATH_TOPOLOGY_FILE).toString());
         diffEnvSettings.setDeltaConfigurationsDir(diffEnvDirPath.resolve(
               BfConsts.RELPATH_CONFIGURATIONS_DIR).toString());
         diffEnvSettings.setExternalBgpAnnouncementsPath(diffEnvDirPath
               .resolve(BfConsts.RELPATH_EXTERNAL_BGP_ANNOUNCEMENTS)
               .toString());
         diffEnvSettings.setPrecomputedRoutesPath(diffEnvPath.resolve(
               BfConsts.RELPATH_PRECOMPUTED_ROUTES).toString());
         if (settings.getDiffActive()) {
            // Differential-active mode: subsequent operations target the
            // delta environment.
            settings.setActiveEnvironmentSettings(diffEnvSettings);
         }
      }
      String outputEnvName = settings.getOutputEnvironmentName();
      if (outputEnvName != null) {
         // Redirect precomputed-route output to a separate environment.
         Path outputEnvPath = Paths.get(baseDir,
               BfConsts.RELPATH_ENVIRONMENTS_DIR, outputEnvName);
         envSettings.setPrecomputedRoutesPath(outputEnvPath.resolve(
               BfConsts.RELPATH_PRECOMPUTED_ROUTES).toString());
      }
      String questionName = settings.getQuestionName();
      if (questionName != null) {
         // Question file locations, plus per-question traffic-fact dump
         // directories (differential questions get both base and delta
         // dumps).
         Path questionPath = Paths.get(baseDir,
               BfConsts.RELPATH_QUESTIONS_DIR, questionName);
         settings.setQuestionPath(questionPath.resolve(
               BfConsts.RELPATH_QUESTION_FILE).toString());
         settings.setQuestionParametersPath(questionPath.resolve(
               BfConsts.RELPATH_QUESTION_PARAM_FILE).toString());
         // NOTE(review): envName may be null here; Paths.get(..., envName,
         // ...) below would then throw NullPointerException. Confirm that
         // callers always supply an environment name with a question name.
         if (diffEnvName != null) {
            diffEnvSettings.setTrafficFactDumpDir(questionPath
                  .resolve(
                        Paths.get(BfConsts.RELPATH_DIFF, envName,
                              diffEnvName,
                              BfConsts.RELPATH_CONTROL_PLANE_FACTS_DIR)
                              .toString()).toString());
            diffEnvSettings.setNxtnetTrafficInputFile(questionPath.resolve(
                  Paths.get(BfConsts.RELPATH_DIFF, envName, diffEnvName,
                        BfConsts.RELPATH_NXTNET_INPUT_FILE).toString())
                  .toString());
            diffEnvSettings.setNxtnetTrafficOutputDir(questionPath.resolve(
                  Paths.get(BfConsts.RELPATH_DIFF, envName, diffEnvName,
                        BfConsts.RELPATH_NXTNET_OUTPUT_DIR).toString())
                  .toString());
            envSettings.setTrafficFactDumpDir(questionPath
                  .resolve(
                        Paths.get(BfConsts.RELPATH_BASE, envName,
                              diffEnvName,
                              BfConsts.RELPATH_CONTROL_PLANE_FACTS_DIR)
                              .toString()).toString());
            envSettings.setNxtnetTrafficInputFile(questionPath.resolve(
                  Paths.get(BfConsts.RELPATH_BASE, envName, diffEnvName,
                        BfConsts.RELPATH_NXTNET_INPUT_FILE).toString())
                  .toString());
            envSettings.setNxtnetTrafficOutputDir(questionPath.resolve(
                  Paths.get(BfConsts.RELPATH_BASE, envName, diffEnvName,
                        BfConsts.RELPATH_NXTNET_OUTPUT_DIR).toString())
                  .toString());
         }
         else {
            envSettings.setTrafficFactDumpDir(questionPath
                  .resolve(
                        Paths.get(BfConsts.RELPATH_BASE, envName,
                              BfConsts.RELPATH_CONTROL_PLANE_FACTS_DIR)
                              .toString()).toString());
            envSettings.setNxtnetTrafficInputFile(questionPath.resolve(
                  Paths.get(BfConsts.RELPATH_BASE, envName,
                        BfConsts.RELPATH_NXTNET_INPUT_FILE).toString())
                  .toString());
            envSettings.setNxtnetTrafficOutputDir(questionPath.resolve(
                  Paths.get(BfConsts.RELPATH_BASE, envName,
                        BfConsts.RELPATH_NXTNET_OUTPUT_DIR).toString())
                  .toString());
         }
      }
   }
}
/**
 * Converts hierarchical configuration text into its flattened (one
 * statement per line) equivalent for the given vendor format.
 *
 * @param input    raw configuration text
 * @param logger   logger used while parsing
 * @param settings global settings passed through to the parser
 * @param format   vendor format of the input; only JUNIPER and VYOS are
 *                 flattenable
 * @param header   header text supplied to the flattener
 * @return flattened configuration text
 * @throws BatfishException if {@code format} is not a flattenable format
 */
public static String flatten(String input, BatfishLogger logger,
      Settings settings, ConfigurationFormat format, String header) {
   switch (format) {
   case JUNIPER: {
      JuniperCombinedParser combinedParser = new JuniperCombinedParser(
            input, settings);
      ParserRuleContext parseTree = parse(combinedParser, logger, settings);
      JuniperFlattener flattener = new JuniperFlattener(header);
      new ParseTreeWalker().walk(flattener, parseTree);
      return flattener.getFlattenedConfigurationText();
   }
   case VYOS: {
      VyosCombinedParser combinedParser = new VyosCombinedParser(input,
            settings);
      ParserRuleContext parseTree = parse(combinedParser, logger, settings);
      VyosFlattener flattener = new VyosFlattener(header);
      new ParseTreeWalker().walk(flattener, parseTree);
      return flattener.getFlattenedConfigurationText();
   }
   // $CASES-OMITTED$
   default:
      throw new BatfishException("Invalid format for flattening");
   }
}
/**
 * Initializes {@code factBins} with one buffer per control-plane fact
 * predicate, optionally seeding each buffer with its column-header row.
 *
 * @param factBins   map from predicate name to fact-line buffer (filled here)
 * @param addHeaders whether each buffer's first line is the column-header row
 */
private static void initControlPlaneFactBins(
      Map<String, StringBuilder> factBins, boolean addHeaders) {
   initFactBins(Facts.CONTROL_PLANE_FACT_COLUMN_HEADERS, factBins,
         addHeaders);
}
/**
 * Populates {@code factBins} with one StringBuilder per predicate in
 * {@code columnHeaderMap}. When {@code addHeaders} is set, each buffer is
 * seeded with that predicate's column-header row followed by a newline;
 * otherwise buffers start empty.
 *
 * @param columnHeaderMap map from predicate name to its column-header row
 * @param factBins        map from predicate name to fact-line buffer
 *                        (filled here)
 * @param addHeaders      whether to seed each buffer with its header row
 */
private static void initFactBins(Map<String, String> columnHeaderMap,
      Map<String, StringBuilder> factBins, boolean addHeaders) {
   // Iterate entries directly instead of keySet()+get() to avoid a second
   // lookup per predicate.
   for (Entry<String, String> headerEntry : columnHeaderMap.entrySet()) {
      StringBuilder factBin = new StringBuilder();
      if (addHeaders) {
         factBin.append(headerEntry.getValue()).append("\n");
      }
      factBins.put(headerEntry.getKey(), factBin);
   }
}
/**
 * Initializes {@code factBins} with one buffer per traffic fact predicate;
 * column-header rows are always added.
 *
 * @param factBins map from predicate name to fact-line buffer (filled here)
 */
private static void initTrafficFactBins(Map<String, StringBuilder> factBins) {
   initFactBins(Facts.TRAFFIC_FACT_COLUMN_HEADERS, factBins, true);
}
/**
 * Runs the given combined parser and returns the resulting parse tree.
 * Logs all parser errors (and, if enabled in settings, the pretty-printed
 * parse tree) to the supplied logger.
 *
 * @param parser   combined parser to run
 * @param logger   destination for error/info output
 * @param settings controls whether the parse tree is printed on success
 * @return the parse tree produced by the parser
 * @throws ParserBatfishException if parsing throws or reports any errors
 */
public static ParserRuleContext parse(BatfishCombinedParser<?, ?> parser,
      BatfishLogger logger, Settings settings) {
   ParserRuleContext tree;
   try {
      tree = parser.parse();
   }
   catch (BatfishException e) {
      throw new ParserBatfishException("Parser error", e);
   }
   List<String> errors = parser.getErrors();
   int numErrors = errors.size();
   if (numErrors > 0) {
      // Report every recorded error with a 1-based "ERROR n:" prefix,
      // then abort.
      logger.error(numErrors + " ERROR(S)\n");
      int errorIndex = 0;
      for (String error : errors) {
         errorIndex++;
         String prefix = "ERROR " + errorIndex + ": ";
         logger.error(CommonUtil.applyPrefix(prefix, error) + "\n");
      }
      throw new ParserBatfishException("Parser error(s)");
   }
   if (settings.printParseTree()) {
      logger.info("OK, PRINTING PARSE TREE:\n");
      logger.info(ParseTreePrettyPrinter.print(tree, parser) + "\n\n");
   }
   else {
      logger.info("OK\n");
   }
   return tree;
}
/**
 * Logs the name of the file being parsed, then delegates to
 * {@link #parse(BatfishCombinedParser, BatfishLogger, Settings)}.
 *
 * @param filename name of the file being parsed (for logging only)
 */
public static ParserRuleContext parse(BatfishCombinedParser<?, ?> parser,
      String filename, BatfishLogger logger, Settings settings) {
   logger.info("Parsing: \"" + filename + "\" ...");
   return parse(parser, logger, settings);
}
// Settings for the base (non-differential) environment.
private EnvironmentSettings _baseEnvSettings;

// Settings for the delta environment used by differential operations.
private EnvironmentSettings _diffEnvSettings;

// Entity tables keyed by environment; population site not visible in this
// chunk.
private final Map<EnvironmentSettings, EntityTable> _entityTables;

// Settings for whichever environment is currently active.
private EnvironmentSettings _envSettings;

private BatfishLogger _logger;

// LogiQL predicate type/documentation info; load site not visible in this
// chunk.
private PredicateInfo _predicateInfo;

private Settings _settings;

// this variable is used to communicate with parent thread on how the job
// finished
private boolean _terminatedWithException;

// Accumulated timer state (units/usage not visible in this chunk).
private long _timerCount;

// Temporary directory for logic files, if one has been created (null
// otherwise).
private File _tmpLogicDir;
/**
 * Creates a Batfish instance driven by the given settings, caching the
 * logger and the per-environment settings objects.
 *
 * @param settings global settings controlling this run
 */
public Batfish(Settings settings) {
   _settings = settings;
   _logger = _settings.getLogger();
   _envSettings = settings.getActiveEnvironmentSettings();
   _baseEnvSettings = settings.getBaseEnvironmentSettings();
   _diffEnvSettings = settings.getDiffEnvironmentSettings();
   _entityTables = new HashMap<EnvironmentSettings, EntityTable>();
   _tmpLogicDir = null;
   _terminatedWithException = false;
}
/**
 * Placeholder for configuration anonymization; not yet implemented.
 */
private void anonymizeConfigurations() {
   // TODO Auto-generated method stub
}
/**
 * Parses the configured question and dispatches it to the handler for its
 * type. Before dispatching, normalizes differential-related flags and
 * disables traffic/history computation for questions that do not need a
 * data plane.
 *
 * @throws BatfishException if the question type is unrecognized
 */
private void answer() {
   Question question = parseQuestion();
   boolean dp = question.getDataPlane();
   boolean diff = question.getDifferential();
   // Differential-active mode applies only when the question itself is not
   // differential.
   boolean diffActive = (question.getDiffActive() || _settings
         .getDiffActive()) && !diff;
   _settings.setDiffActive(diffActive);
   _settings.setDiffQuestion(diff);
   // TODO: fix hack for verify questions
   if (!dp || question instanceof VerifyQuestion) {
      // No data plane needed: skip traffic computation and history.
      _settings.setNxtnetTraffic(false);
      _settings.setHistory(false);
   }
   initQuestionEnvironments(question, diff, diffActive, dp);
   // Dispatch on question type; each case downcasts to the concrete
   // question class.
   switch (question.getType()) {
   case ACL_REACHABILITY:
      answerAclReachability((AclReachabilityQuestion) question);
      break;
   case COMPARE_SAME_NAME:
      answerCompareSameName((CompareSameNameQuestion) question);
      break;
   case DESTINATION:
      answerDestination((DestinationQuestion) question);
      break;
   case INGRESS_PATH:
      answerIngressPath((IngressPathQuestion) question);
      break;
   case LOCAL_PATH:
      answerLocalPath((LocalPathQuestion) question);
      break;
   case MULTIPATH:
      answerMultipath((MultipathQuestion) question);
      break;
   case NEIGHBORS:
      answerNeighbors((NeighborsQuestion) question);
      break;
   case NODES:
      answerNodes((NodesQuestion) question);
      break;
   case PROTOCOL_DEPENDENCIES:
      answerProtocolDependencies((ProtocolDependenciesQuestion) question);
      break;
   case REACHABILITY:
      answerReachability((ReachabilityQuestion) question);
      break;
   case REDUCED_REACHABILITY:
      answerReducedReachability((ReducedReachabilityQuestion) question);
      break;
   case TRACEROUTE:
      answerTraceroute((TracerouteQuestion) question);
      break;
   case VERIFY:
      answerVerify((VerifyQuestion) question);
      break;
   default:
      throw new BatfishException("Unknown question type");
   }
}
/**
 * Answers an ACL-reachability question: for every ACL line in every
 * configuration, runs a NoD SAT query to determine whether the line is
 * reachable (i.e. some packet can match it), logs per-line results for
 * ACLs that contain unreachable lines, prints summary statistics, and
 * optionally writes a JSON "view" of affected nodes.
 *
 * @param question the ACL-reachability question (currently carries no
 *                 parameters used here)
 */
private void answerAclReachability(AclReachabilityQuestion question) {
   checkConfigurations();
   Map<String, Configuration> configurations = loadConfigurations();
   Synthesizer aclSynthesizer = synthesizeAcls(configurations);
   // Build one SAT job per (hostname, acl) pair.
   List<NodSatJob<AclLine>> jobs = new ArrayList<NodSatJob<AclLine>>();
   for (Entry<String, Configuration> e : configurations.entrySet()) {
      String hostname = e.getKey();
      Configuration c = e.getValue();
      for (Entry<String, IpAccessList> e2 : c.getIpAccessLists().entrySet()) {
         String aclName = e2.getKey();
         // skip juniper srx inbound filters, as they can't really contain
         // operator error
         if (aclName.contains("~ZONE_INTERFACE_FILTER~")
               || aclName.contains("~INBOUND_ZONE_FILTER~")) {
            continue;
         }
         IpAccessList acl = e2.getValue();
         int numLines = acl.getLines().size();
         if (numLines == 0) {
            _logger.redflag("RED_FLAG: Acl \"" + hostname + ":" + aclName
                  + "\" contains no lines\n");
            continue;
         }
         AclReachabilityQuerySynthesizer query = new AclReachabilityQuerySynthesizer(
               hostname, aclName, numLines);
         NodSatJob<AclLine> job = new NodSatJob<AclLine>(aclSynthesizer,
               query);
         jobs.add(job);
      }
   }
   // output maps each ACL line to its SAT result (true = reachable).
   Map<AclLine, Boolean> output = new TreeMap<AclLine, Boolean>();
   computeNodSatOutput(jobs, output);
   // First pass: collect all ACLs, count unreachable lines, and record
   // which ACLs have at least one unreachable line.
   Set<Pair<String, String>> aclsWithUnreachableLines = new TreeSet<Pair<String, String>>();
   Set<Pair<String, String>> allAcls = new TreeSet<Pair<String, String>>();
   int numUnreachableLines = 0;
   int numLines = output.entrySet().size();
   for (Entry<AclLine, Boolean> e : output.entrySet()) {
      AclLine aclLine = e.getKey();
      boolean sat = e.getValue();
      String hostname = aclLine.getHostname();
      String aclName = aclLine.getAclName();
      Pair<String, String> qualifiedAclName = new Pair<String, String>(
            hostname, aclName);
      allAcls.add(qualifiedAclName);
      if (!sat) {
         numUnreachableLines++;
         aclsWithUnreachableLines.add(qualifiedAclName);
      }
   }
   // Second pass: print per-line results, but only for ACLs known to
   // contain at least one unreachable line.
   for (Entry<AclLine, Boolean> e : output.entrySet()) {
      AclLine aclLine = e.getKey();
      boolean sat = e.getValue();
      String hostname = aclLine.getHostname();
      String aclName = aclLine.getAclName();
      Pair<String, String> qualifiedAclName = new Pair<String, String>(
            hostname, aclName);
      if (aclsWithUnreachableLines.contains(qualifiedAclName)) {
         int line = aclLine.getLine();
         if (sat) {
            _logger.outputf("%s:%s:%d is REACHABLE\n", hostname, aclName,
                  line);
         }
         else {
            Configuration c = configurations.get(aclLine.getHostname());
            IpAccessList acl = c.getIpAccessLists()
                  .get(aclLine.getAclName());
            IpAccessListLine ipAccessListLine = acl.getLines().get(line);
            _logger.outputf("%s:%s:%d is UNREACHABLE\n\t%s\n", hostname,
                  aclName, line, ipAccessListLine.toString());
            // NOTE(review): qualifiedAclName is already in the set (the
            // contains() check above passed), so this add is redundant.
            aclsWithUnreachableLines.add(qualifiedAclName);
         }
      }
   }
   for (Pair<String, String> qualfiedAcl : aclsWithUnreachableLines) {
      String hostname = qualfiedAcl.getFirst();
      String aclName = qualfiedAcl.getSecond();
      _logger.outputf("%s:%s has at least 1 unreachable line\n", hostname,
            aclName);
   }
   int numAclsWithUnreachableLines = aclsWithUnreachableLines.size();
   int numAcls = allAcls.size();
   // NOTE(review): if no ACL lines were analyzed, numAcls/numLines are 0
   // and these double divisions yield NaN — confirm callers never reach
   // here with an empty output map.
   double percentUnreachableAcls = 100d * numAclsWithUnreachableLines
         / numAcls;
   double percentUnreachableLines = 100d * numUnreachableLines / numLines;
   _logger.outputf("SUMMARY:\n");
   _logger.outputf("\t%d/%d (%.1f%%) acls have unreachable lines\n",
         numAclsWithUnreachableLines, numAcls, percentUnreachableAcls);
   _logger.outputf("\t%d/%d (%.1f%%) acl lines are unreachable\n",
         numUnreachableLines, numLines, percentUnreachableLines);
   // Optional JSON output: one "view" listing each node that has
   // unreachable ACL lines, with an HTML description per node.
   String jsonOutputPath = _settings.getAnswerJsonPath();
   if (jsonOutputPath != null) {
      String jsonOutput = null;
      try {
         JSONObject query = new JSONObject();
         Map<String, StringBuilder> nodeDescriptions = new HashMap<String, StringBuilder>();
         Map<String, JSONObject> nodeObjects = new HashMap<String, JSONObject>();
         query.put("name", _settings.getQuestionName());
         query.put("type", "query");
         JSONObject views = new JSONObject();
         query.put("views", views);
         String viewName = "Nodes with unreachable ACL lines";
         JSONObject view = new JSONObject();
         views.put(viewName, view);
         view.put("name", viewName);
         view.put("type", "view");
         view.put("color", "error");
         JSONObject nodes = new JSONObject();
         view.put("nodes", nodes);
         for (Entry<AclLine, Boolean> e : output.entrySet()) {
            AclLine aclLine = e.getKey();
            boolean sat = e.getValue();
            String hostname = aclLine.getHostname();
            String aclName = aclLine.getAclName();
            Pair<String, String> qualifiedAclName = new Pair<String, String>(
                  hostname, aclName);
            if (aclsWithUnreachableLines.contains(qualifiedAclName)) {
               int line = aclLine.getLine();
               JSONObject node;
               // Create the node object lazily on first sighting of this
               // hostname.
               if (nodeObjects.containsKey(hostname)) {
                  node = nodes.getJSONObject(hostname);
               }
               else {
                  node = new JSONObject();
                  nodes.put(hostname, node);
                  node.put("name", hostname);
                  node.put("type", "node");
                  StringBuilder nodeDescriptionBuilder = new StringBuilder();
                  nodeObjects.put(hostname, node);
                  nodeDescriptions.put(hostname, nodeDescriptionBuilder);
               }
               StringBuilder nodeDescriptionBuilder = nodeDescriptions
                     .get(hostname);
               if (!sat) {
                  Configuration c = configurations
                        .get(aclLine.getHostname());
                  IpAccessList acl = c.getIpAccessLists().get(
                        aclLine.getAclName());
                  IpAccessListLine ipAccessListLine = acl.getLines().get(
                        line);
                  nodeDescriptionBuilder
                        .append(String
                              .format(
                                    "ACL: \"%s\", line: %d is UNREACHABLE<br> %s<br>",
                                    aclName, line,
                                    ipAccessListLine.toString()));
               }
            }
         }
         // Attach the accumulated description text to each node object.
         for (String hostname : nodeObjects.keySet()) {
            JSONObject node = nodeObjects.get(hostname);
            StringBuilder descriptionBuilder = nodeDescriptions
                  .get(hostname);
            node.put("description", descriptionBuilder.toString());
         }
         jsonOutput = query.toString(3);
      }
      catch (JSONException e) {
         throw new BatfishException(
               "Error converting acl reachability analysis output to json",
               e);
      }
      Util.writeFile(jsonOutputPath, jsonOutput);
   }
}
/**
 * Answers a {@link CompareSameNameQuestion}: collects the set of nodes whose
 * hostnames match the question's node regex. The comparison itself is not yet
 * implemented, so this method currently always throws after collecting nodes.
 *
 * @param question supplies the node regex used to filter hostnames
 * @throws BatfishException if the supplied node regex is invalid
 * @throws UnsupportedOperationException always, until implemented
 */
private void answerCompareSameName(CompareSameNameQuestion question) {
   checkConfigurations();
   Map<String, Configuration> configurations = loadConfigurations();
   // collect nodes whose hostnames match the question's regex
   Pattern nodeRegex;
   try {
      nodeRegex = Pattern.compile(question.getNodeRegex());
   }
   catch (PatternSyntaxException e) {
      throw new BatfishException(
            "Supplied regex for nodes is not a valid java regex: \""
                  + question.getNodeRegex() + "\"", e);
   }
   Set<String> nodes = new TreeSet<String>();
   // NOTE(review): Pattern.compile never returns null, so the else branch
   // below is unreachable dead code.
   if (nodeRegex != null) {
      for (String node : configurations.keySet()) {
         Matcher nodeMatcher = nodeRegex.matcher(node);
         if (nodeMatcher.matches()) {
            nodes.add(node);
         }
      }
   }
   else {
      nodes.addAll(configurations.keySet());
   }
   // 'nodes' is computed but unused until the implementation lands
   throw new UnsupportedOperationException(
         "no implementation for generated method"); // TODO Auto-generated
}
/**
 * Answers a {@link DestinationQuestion}. Verifies differential data-plane
 * prerequisites, but the answer logic itself is not yet implemented.
 *
 * @throws UnsupportedOperationException always, until implemented
 */
private void answerDestination(DestinationQuestion question) {
   checkDifferentialDataPlaneQuestionDependencies();
   throw new UnsupportedOperationException(
         "no implementation for generated method"); // TODO Auto-generated
   // method stub
}
/**
 * Answers an {@link IngressPathQuestion}. Verifies differential data-plane
 * prerequisites, but the answer logic itself is not yet implemented.
 *
 * @throws UnsupportedOperationException always, until implemented
 */
private void answerIngressPath(IngressPathQuestion question) {
   checkDifferentialDataPlaneQuestionDependencies();
   throw new UnsupportedOperationException(
         "no implementation for generated method"); // TODO Auto-generated
   // method stub
}
/**
 * Answers a {@link LocalPathQuestion} by comparing local edge reachability
 * between the base and differential environments. For every edge present in
 * the differential topology, a composite NoD job checks reachability in the
 * base environment against non-reachability in the differential one; edges
 * present only in the base topology get a base-reachability query when their
 * ingress node still exists. All jobs are constrained by the differential
 * blacklist. Witness flows are logged and dumped as traffic facts for both
 * environments.
 */
private void answerLocalPath(LocalPathQuestion question) {
   checkDifferentialDataPlaneQuestionDependencies();
   String tag = getDifferentialFlowTag();
   // load base configurations and generate base data plane
   Map<String, Configuration> baseConfigurations = loadConfigurations(_baseEnvSettings);
   File baseDataPlanePath = new File(_baseEnvSettings.getDataPlanePath());
   Synthesizer baseDataPlaneSynthesizer = synthesizeDataPlane(
         baseConfigurations, baseDataPlanePath);
   // load diff configurations and generate diff data plane
   Map<String, Configuration> diffConfigurations = loadConfigurations(_diffEnvSettings);
   File diffDataPlanePath = new File(_diffEnvSettings.getDataPlanePath());
   Synthesizer diffDataPlaneSynthesizer = synthesizeDataPlane(
         diffConfigurations, diffDataPlanePath);
   // (removed dead code: a commonNodes set was computed here but never used)
   NodeSet blacklistNodes = getNodeBlacklist(_diffEnvSettings);
   Set<NodeInterfacePair> blacklistInterfaces = getInterfaceBlacklist(_diffEnvSettings);
   EdgeSet blacklistEdges = getEdgeBlacklist(_diffEnvSettings);
   BlacklistDstIpQuerySynthesizer blacklistQuery = new BlacklistDstIpQuerySynthesizer(
         null, blacklistNodes, blacklistInterfaces, blacklistEdges,
         baseConfigurations);
   // compute composite program and flows; synthesizer order corresponds
   // positionally to the queries added to each composite job below
   List<Synthesizer> commonEdgeSynthesizers = new ArrayList<Synthesizer>();
   commonEdgeSynthesizers.add(baseDataPlaneSynthesizer);
   commonEdgeSynthesizers.add(diffDataPlaneSynthesizer);
   commonEdgeSynthesizers.add(baseDataPlaneSynthesizer);
   List<CompositeNodJob> jobs = new ArrayList<CompositeNodJob>();
   // generate local edge reachability and black hole queries
   Topology diffTopology = loadTopology(_diffEnvSettings);
   EdgeSet diffEdges = diffTopology.getEdges();
   for (Edge edge : diffEdges) {
      String ingressNode = edge.getNode1();
      ReachEdgeQuerySynthesizer reachQuery = new ReachEdgeQuerySynthesizer(
            ingressNode, edge, true);
      // NOTE(review): noReachQuery is constructed with the same flag (true)
      // as reachQuery and differs only by negation -- confirm the third
      // argument should not be false here.
      ReachEdgeQuerySynthesizer noReachQuery = new ReachEdgeQuerySynthesizer(
            ingressNode, edge, true);
      noReachQuery.setNegate(true);
      List<QuerySynthesizer> queries = new ArrayList<QuerySynthesizer>();
      queries.add(reachQuery);
      queries.add(noReachQuery);
      queries.add(blacklistQuery);
      NodeSet nodes = new NodeSet();
      nodes.add(ingressNode);
      CompositeNodJob job = new CompositeNodJob(commonEdgeSynthesizers,
            queries, nodes, tag);
      jobs.add(job);
   }
   // we also need queries for nodes next to edges that are now missing, in
   // the case that those nodes still exist
   List<Synthesizer> missingEdgeSynthesizers = new ArrayList<Synthesizer>();
   missingEdgeSynthesizers.add(baseDataPlaneSynthesizer);
   missingEdgeSynthesizers.add(baseDataPlaneSynthesizer);
   Topology baseTopology = loadTopology(_baseEnvSettings);
   EdgeSet baseEdges = baseTopology.getEdges();
   EdgeSet missingEdges = new EdgeSet();
   missingEdges.addAll(baseEdges);
   missingEdges.removeAll(diffEdges);
   for (Edge missingEdge : missingEdges) {
      String ingressNode = missingEdge.getNode1();
      if (diffConfigurations.containsKey(ingressNode)) {
         ReachEdgeQuerySynthesizer reachQuery = new ReachEdgeQuerySynthesizer(
               ingressNode, missingEdge, true);
         List<QuerySynthesizer> queries = new ArrayList<QuerySynthesizer>();
         queries.add(reachQuery);
         queries.add(blacklistQuery);
         NodeSet nodes = new NodeSet();
         nodes.add(ingressNode);
         CompositeNodJob job = new CompositeNodJob(missingEdgeSynthesizers,
               queries, nodes, tag);
         jobs.add(job);
      }
   }
   // run all jobs, then dump the resulting flows as traffic facts
   Set<Flow> flows = computeCompositeNodOutput(jobs);
   Map<String, StringBuilder> trafficFactBins = new LinkedHashMap<String, StringBuilder>();
   initTrafficFactBins(trafficFactBins);
   StringBuilder wSetFlowOriginate = trafficFactBins.get("SetFlowOriginate");
   for (Flow flow : flows) {
      wSetFlowOriginate.append(flow.toLBLine());
      _logger.output(flow.toString() + "\n");
   }
   dumpTrafficFacts(trafficFactBins, _baseEnvSettings);
   dumpTrafficFacts(trafficFactBins, _diffEnvSettings);
}
/**
 * Answers a {@link MultipathQuestion}: for each node, runs a NoD job that
 * searches for multipath-inconsistent flows originating at that node, then
 * dumps any discovered flows as traffic facts.
 */
private void answerMultipath(MultipathQuestion question) {
   checkDataPlaneQuestionDependencies();
   String tag = getFlowTag();
   Map<String, Configuration> configurations = loadConfigurations();
   File dataPlanePath = new File(_envSettings.getDataPlanePath());
   Synthesizer dataPlaneSynthesizer = synthesizeDataPlane(configurations,
         dataPlanePath);
   // one multipath-inconsistency query per node
   List<NodJob> jobs = new ArrayList<NodJob>();
   for (String hostname : configurations.keySet()) {
      MultipathInconsistencyQuerySynthesizer query = new MultipathInconsistencyQuerySynthesizer(
            hostname);
      NodeSet ingressNodes = new NodeSet();
      ingressNodes.add(hostname);
      jobs.add(new NodJob(dataPlaneSynthesizer, query, ingressNodes, tag));
   }
   Set<Flow> flows = computeNodOutput(jobs);
   // record each counterexample flow as a SetFlowOriginate fact
   Map<String, StringBuilder> trafficFactBins = new LinkedHashMap<String, StringBuilder>();
   initTrafficFactBins(trafficFactBins);
   StringBuilder flowOriginateFacts = trafficFactBins.get("SetFlowOriginate");
   for (Flow flow : flows) {
      flowOriginateFacts.append(flow.toLBLine());
   }
   dumpTrafficFacts(trafficFactBins);
}
/**
 * Answers a {@link NeighborsQuestion}. Not yet implemented; emits an
 * explicit notice instead of the previous placeholder text.
 */
private void answerNeighbors(NeighborsQuestion question) {
   // fixed nonsense placeholder message ("Boohoo bohoo neighbors")
   _logger.output("Neighbors question is not yet implemented\n");
}
/**
 * Answers a {@link NodesQuestion}: serializes the configuration of every
 * node whose hostname matches the question's node regex into a JSON answer.
 *
 * @param question supplies the node regex used to filter hostnames
 * @throws BatfishException if the supplied node regex is invalid
 */
private void answerNodes(NodesQuestion question) {
   checkConfigurations();
   Map<String, Configuration> configurations = loadConfigurations();
   // collect nodes whose hostnames match the question's regex
   Pattern nodeRegex;
   try {
      nodeRegex = Pattern.compile(question.getNodeRegex());
   }
   catch (PatternSyntaxException e) {
      throw new BatfishException(
            "Supplied regex for nodes is not a valid java regex: \""
                  + question.getNodeRegex() + "\"", e);
   }
   // Pattern.compile never returns null, so the regex always applies; the
   // former null-check fallback was unreachable and has been removed.
   Set<String> nodes = new TreeSet<String>();
   for (String node : configurations.keySet()) {
      Matcher nodeMatcher = nodeRegex.matcher(node);
      if (nodeMatcher.matches()) {
         nodes.add(node);
      }
   }
   Answer answer = new Answer();
   answer.setQuestion(question);
   try {
      JSONObject jObj = new JSONObject();
      JSONArray nodeList = new JSONArray();
      jObj.put(BfJson.KEY_NODES, nodeList);
      for (String node : nodes) {
         JSONObject nodeObject = new JSONObject();
         nodeObject.put(BfJson.KEY_NODE, configurations.get(node).toJson());
         nodeList.put(nodeObject);
      }
      answer.setStatus(AnswerStatus.SUCCESS);
      answer.addAnswerElement(new StringAnswer(jObj.toString()));
   }
   catch (Exception e) {
      // report the failure through the answer rather than crashing
      BatfishException be = new BatfishException(
            "Error in answering NodesQuestion", e);
      answer.setStatus(AnswerStatus.FAILURE);
      answer.addAnswerElement(new StringAnswer(be.getMessage()));
   }
   outputAnswer(answer);
}
/**
 * Answers a {@link ProtocolDependenciesQuestion}: runs protocol dependency
 * analysis over all configurations, printing the dependencies and writing
 * dependency graphs to the configured output location.
 */
private void answerProtocolDependencies(ProtocolDependenciesQuestion question) {
   checkConfigurations();
   Map<String, Configuration> configurations = loadConfigurations();
   ProtocolDependencyAnalysis analysis = new ProtocolDependencyAnalysis(
         configurations);
   analysis.printDependencies(_logger);
   analysis.writeGraphs(_settings, _logger);
}
/**
 * Answers a {@link ReachabilityQuestion}: for every ingress node matching
 * the question's ingress-node regex, synthesizes a NoD reachability query
 * constrained by the question's header-space parameters and final-node
 * regex, then dumps any witness flows as traffic facts.
 *
 * @param question supplies the regexes and header-space constraints
 */
private void answerReachability(ReachabilityQuestion question) {
   checkDataPlaneQuestionDependencies();
   String tag = getFlowTag();
   Map<String, Configuration> configurations = loadConfigurations();
   File dataPlanePath = new File(_envSettings.getDataPlanePath());
   Set<Flow> flows = null;
   Synthesizer dataPlaneSynthesizer = synthesizeDataPlane(configurations,
         dataPlanePath);
   // collect ingress nodes
   // NOTE(review): Pattern.compile never returns null, so this else branch
   // and the final-node one below are unreachable dead code.
   Pattern ingressNodeRegex = Pattern.compile(question.getIngressNodeRegex());
   Set<String> activeIngressNodes = new TreeSet<String>();
   if (ingressNodeRegex != null) {
      for (String node : configurations.keySet()) {
         Matcher ingressNodeMatcher = ingressNodeRegex.matcher(node);
         if (ingressNodeMatcher.matches()) {
            activeIngressNodes.add(node);
         }
      }
   }
   else {
      activeIngressNodes.addAll(configurations.keySet());
   }
   // collect final nodes
   Pattern finalNodeRegex = Pattern.compile(question.getFinalNodeRegex());
   Set<String> activeFinalNodes = new TreeSet<String>();
   if (finalNodeRegex != null) {
      for (String node : configurations.keySet()) {
         Matcher finalNodeMatcher = finalNodeRegex.matcher(node);
         if (finalNodeMatcher.matches()) {
            activeFinalNodes.add(node);
         }
      }
   }
   else {
      activeFinalNodes.addAll(configurations.keySet());
   }
   // build query jobs: one reachability query per ingress node
   List<NodJob> jobs = new ArrayList<NodJob>();
   for (String ingressNode : activeIngressNodes) {
      ReachabilityQuerySynthesizer query = new ReachabilityQuerySynthesizer(
            question.getActions(), question.getDstPrefixes(),
            question.getDstPortRange(), activeFinalNodes,
            Collections.singleton(ingressNode),
            question.getIpProtocolRange(), question.getSrcPrefixes(),
            question.getSrcPortRange(), question.getIcmpType(),
            question.getIcmpCode(), 0 /*
                                       * TODO: allow constraining tcpFlags
                                       * question.getTcpFlags()
                                       */);
      NodeSet nodes = new NodeSet();
      nodes.add(ingressNode);
      NodJob job = new NodJob(dataPlaneSynthesizer, query, nodes, tag);
      jobs.add(job);
   }
   // run jobs and get resulting flows
   flows = computeNodOutput(jobs);
   // dump flows to disk
   Map<String, StringBuilder> trafficFactBins = new LinkedHashMap<String, StringBuilder>();
   initTrafficFactBins(trafficFactBins);
   StringBuilder wSetFlowOriginate = trafficFactBins.get("SetFlowOriginate");
   for (Flow flow : flows) {
      wSetFlowOriginate.append(flow.toLBLine());
   }
   dumpTrafficFacts(trafficFactBins);
}
/**
 * Answers a {@link ReducedReachabilityQuestion}: for each node common to the
 * base and differential environments, runs a composite NoD job requiring
 * reachability in the base environment but none in the differential one
 * (modulo the differential blacklist), then dumps witness flows as traffic
 * facts for both environments.
 */
private void answerReducedReachability(ReducedReachabilityQuestion question) {
   checkDifferentialDataPlaneQuestionDependencies();
   String tag = getDifferentialFlowTag();
   // load base configurations and generate base data plane
   Map<String, Configuration> baseConfigurations = loadConfigurations(_baseEnvSettings);
   File baseDataPlanePath = new File(_baseEnvSettings.getDataPlanePath());
   Synthesizer baseDataPlaneSynthesizer = synthesizeDataPlane(
         baseConfigurations, baseDataPlanePath);
   // load diff configurations and generate diff data plane
   Map<String, Configuration> diffConfigurations = loadConfigurations(_diffEnvSettings);
   File diffDataPlanePath = new File(_diffEnvSettings.getDataPlanePath());
   Synthesizer diffDataPlaneSynthesizer = synthesizeDataPlane(
         diffConfigurations, diffDataPlanePath);
   // only nodes present in both environments are queried
   Set<String> commonNodes = new TreeSet<String>();
   commonNodes.addAll(baseConfigurations.keySet());
   commonNodes.retainAll(diffConfigurations.keySet());
   NodeSet blacklistNodes = getNodeBlacklist(_diffEnvSettings);
   Set<NodeInterfacePair> blacklistInterfaces = getInterfaceBlacklist(_diffEnvSettings);
   EdgeSet blacklistEdges = getEdgeBlacklist(_diffEnvSettings);
   BlacklistDstIpQuerySynthesizer blacklistQuery = new BlacklistDstIpQuerySynthesizer(
         null, blacklistNodes, blacklistInterfaces, blacklistEdges,
         baseConfigurations);
   // compute composite program and flows; synthesizer order corresponds
   // positionally to the queries added to each composite job below
   List<Synthesizer> synthesizers = new ArrayList<Synthesizer>();
   synthesizers.add(baseDataPlaneSynthesizer);
   synthesizers.add(diffDataPlaneSynthesizer);
   synthesizers.add(baseDataPlaneSynthesizer);
   List<CompositeNodJob> jobs = new ArrayList<CompositeNodJob>();
   // generate base reachability and diff blackhole and blacklist queries
   for (String node : commonNodes) {
      ReachableQuerySynthesizer reachableQuery = new ReachableQuerySynthesizer(
            node, null);
      ReachableQuerySynthesizer blackHoleQuery = new ReachableQuerySynthesizer(
            node, null);
      blackHoleQuery.setNegate(true);
      NodeSet nodes = new NodeSet();
      nodes.add(node);
      List<QuerySynthesizer> queries = new ArrayList<QuerySynthesizer>();
      queries.add(reachableQuery);
      queries.add(blackHoleQuery);
      queries.add(blacklistQuery);
      CompositeNodJob job = new CompositeNodJob(synthesizers, queries,
            nodes, tag);
      jobs.add(job);
   }
   // run all jobs, then dump the resulting flows as traffic facts
   Set<Flow> flows = computeCompositeNodOutput(jobs);
   Map<String, StringBuilder> trafficFactBins = new LinkedHashMap<String, StringBuilder>();
   initTrafficFactBins(trafficFactBins);
   StringBuilder wSetFlowOriginate = trafficFactBins.get("SetFlowOriginate");
   for (Flow flow : flows) {
      wSetFlowOriginate.append(flow.toLBLine());
      _logger.debug("Found: " + flow.toString() + "\n");
   }
   dumpTrafficFacts(trafficFactBins, _baseEnvSettings);
   dumpTrafficFacts(trafficFactBins, _diffEnvSettings);
}
/**
 * Answers a {@link TracerouteQuestion}: tags each of the question's flow
 * builders with the current flow tag, builds the flows, and dumps them as
 * SetFlowOriginate traffic facts.
 */
private void answerTraceroute(TracerouteQuestion question) {
   checkDataPlaneQuestionDependencies();
   Map<String, StringBuilder> trafficFactBins = new LinkedHashMap<String, StringBuilder>();
   initTrafficFactBins(trafficFactBins);
   StringBuilder flowOriginateFacts = trafficFactBins.get("SetFlowOriginate");
   String tag = getFlowTag();
   for (FlowBuilder builder : question.getFlowBuilders()) {
      builder.setTag(tag);
      Flow flow = builder.build();
      flowOriginateFacts.append(flow.toLBLine());
   }
   dumpTrafficFacts(trafficFactBins);
}
/**
 * Answers a {@link VerifyQuestion} by executing its verification program
 * against the loaded configurations, pre-populating whichever data-plane
 * artifacts (BGP advertisements, routes) the program declares it needs.
 * Prints an assertion summary and writes JSON output when configured.
 */
private void answerVerify(VerifyQuestion question) {
   checkConfigurations();
   Map<String, Configuration> configurations = loadConfigurations();
   VerifyProgram program = question.getProgram();
   if (program.getDataPlane()) {
      if (program.getDataPlaneBgpAdvertisements()) {
         AdvertisementSet bgpAdvertisements = getAdvertisements(_envSettings);
         program.setBgpAdvertisements(bgpAdvertisements);
      }
      if (program.getDataPlaneRoutes()) {
         RouteSet routes = getRoutes(_envSettings);
         program.setRoutes(routes);
      }
   }
   program.execute(configurations, _logger, _settings);
   if (program.getAssertions()) {
      int totalAssertions = program.getTotalAssertions();
      int failedAssertions = program.getFailedAssertions();
      int passedAssertions = totalAssertions - failedAssertions;
      // guard against 0/0 => NaN when the program contains no assertions
      double percentPassed = totalAssertions == 0 ? 100d
            : 100 * ((double) passedAssertions) / totalAssertions;
      _logger.outputf("%d/%d (%.1f%%) assertions passed.\n",
            passedAssertions, totalAssertions, percentPassed);
      if (!program.getUnsafe()) {
         _logger.output("No violations detected\n");
      }
   }
   String jsonPath = _settings.getAnswerJsonPath();
   if (jsonPath != null) {
      String jsonOutput = program.getJson();
      Util.writeFile(jsonPath, jsonOutput);
   }
}
/**
 * This function extracts predicate type information from the logic files. It
 * is meant only to be called during the build process, and should never be
 * executed from a jar.
 *
 * <p>It parses every relevant .logic file under the logic source directory,
 * resolves predicate value types, qualified names, functions, and semantics,
 * and serializes the resulting {@code PredicateInfo} into the compiled logic
 * resource directory.</p>
 *
 * @throws BatfishException if invoked from within a jar, if the logic output
 *            directory cannot be resolved, or if the logic files cannot be
 *            listed
 */
private void buildPredicateInfo() {
   Path logicBinDirPath = null;
   URL logicSourceURL = LogicResourceLocator.class.getProtectionDomain()
         .getCodeSource().getLocation();
   String logicSourceString = logicSourceURL.toString();
   // refuse to run when packaged: resources are not real files inside a jar
   if (logicSourceString.startsWith("onejar:")) {
      throw new BatfishException(
            "buildPredicateInfo() should never be called from within a jar");
   }
   String logicPackageResourceName = LogicResourceLocator.class.getPackage()
         .getName().replace('.', SEPARATOR.charAt(0));
   try {
      logicBinDirPath = Paths.get(LogicResourceLocator.class
            .getClassLoader().getResource(logicPackageResourceName).toURI());
   }
   catch (URISyntaxException e) {
      throw new BatfishException("Failed to resolve logic output directory",
            e);
   }
   Path logicSrcDirPath = Paths.get(_settings.getLogicSrcDir());
   final Set<Path> logicFiles = new TreeSet<Path>();
   try {
      // collect all *.logic files except base facts, pedantic, rules, and
      // service definitions
      Files.walkFileTree(logicSrcDirPath,
            new java.nio.file.SimpleFileVisitor<Path>() {
               @Override
               public FileVisitResult visitFile(Path file,
                     BasicFileAttributes attrs) throws IOException {
                  String name = file.getFileName().toString();
                  if (!name.equals("BaseFacts.logic")
                        && !name.equals("pedantic.logic")
                        && !name.endsWith("_rules.logic")
                        && !name.startsWith("service_")
                        && name.endsWith(".logic")) {
                     logicFiles.add(file);
                  }
                  return super.visitFile(file, attrs);
               }
            });
   }
   catch (IOException e) {
      throw new BatfishException("Could not make list of logic files", e);
   }
   PredicateValueTypeMap predicateValueTypes = new PredicateValueTypeMap();
   QualifiedNameMap qualifiedNameMap = new QualifiedNameMap();
   FunctionSet functions = new FunctionSet();
   PredicateSemantics predicateSemantics = new PredicateSemantics();
   List<ParserRuleContext> trees = new ArrayList<ParserRuleContext>();
   // parse every logic file into a tree
   for (Path logicFilePath : logicFiles) {
      String input = Util.readFile(logicFilePath.toFile());
      LogiQLCombinedParser parser = new LogiQLCombinedParser(input,
            _settings);
      ParserRuleContext tree = parse(parser, logicFilePath.toString());
      trees.add(tree);
   }
   // first pass: extract predicate value types
   ParseTreeWalker walker = new ParseTreeWalker();
   for (ParserRuleContext tree : trees) {
      LogQLPredicateInfoExtractor extractor = new LogQLPredicateInfoExtractor(
            predicateValueTypes);
      walker.walk(extractor, tree);
   }
   // second pass: resolve qualified names, functions, and semantics
   for (ParserRuleContext tree : trees) {
      LogiQLPredicateInfoResolver resolver = new LogiQLPredicateInfoResolver(
            predicateValueTypes, qualifiedNameMap, functions,
            predicateSemantics);
      walker.walk(resolver, tree);
   }
   PredicateInfo predicateInfo = new PredicateInfo(predicateSemantics,
         predicateValueTypes, functions, qualifiedNameMap);
   File predicateInfoFile = logicBinDirPath.resolve(PREDICATE_INFO_FILENAME)
         .toFile();
   serializeObject(predicateInfo, predicateInfoFile);
}
/** Fails fast when the configured test rig base directory is absent. */
private void checkBaseDirExists() {
   File baseDir = new File(_settings.getAutoBaseDir());
   if (baseDir.exists()) {
      return;
   }
   throw new CleanBatfishException("Test rig does not exist: \""
         + baseDir.getName() + "\"");
}
/**
 * Verifies prerequisites for computing control-plane facts; differential
 * runs additionally require a base data plane and a diff environment.
 */
private void checkComputeControlPlaneFacts() {
   checkConfigurations();
   checkEnvironmentExists(_baseEnvSettings);
   if (!_settings.getDiffActive()) {
      return;
   }
   checkDataPlane(_baseEnvSettings);
   checkDiffEnvironmentExists();
}
/**
 * Verifies prerequisites for computing nxtnet relations: control-plane
 * facts must already exist for the given environment.
 */
private void checkComputeNxtnetRelations(EnvironmentSettings envSettings) {
   checkControlPlaneFacts(envSettings);
}
/**
 * Verifies that compiled vendor-independent configurations are present on
 * disk, distinguishing a missing directory from an empty one.
 */
private void checkConfigurations() {
   File dir = new File(_settings.getSerializeIndependentPath());
   File[] serializedConfigs = dir.listFiles();
   if (serializedConfigs == null) {
      throw new CleanBatfishException(
            "Missing compiled vendor-independent configurations for this test-rig\n");
   }
   if (serializedConfigs.length == 0) {
      throw new CleanBatfishException(
            "Nothing to do: Set of vendor-independent configurations for this test-rig is empty\n");
   }
}
/**
 * Throws if control-plane facts have not been generated for the given
 * environment.
 */
private void checkControlPlaneFacts(EnvironmentSettings envSettings) {
   File cpFactsDir = new File(envSettings.getControlPlaneFactsDir());
   if (cpFactsDir.exists()) {
      return;
   }
   throw new CleanBatfishException(
         "Missing control plane facts for environment: \""
               + envSettings.getName() + "\"\n");
}
/** Throws if the data plane has not been computed for the environment. */
private void checkDataPlane(EnvironmentSettings envSettings) {
   File dataPlaneFile = new File(envSettings.getDataPlanePath());
   if (dataPlaneFile.exists()) {
      return;
   }
   throw new CleanBatfishException("Missing data plane for environment: "
         + envSettings.getName() + "\n");
}
/**
 * Throws if computed data-plane facts are absent for the environment; also
 * requires that the environment itself exists.
 */
private void checkDataPlaneFacts(EnvironmentSettings envSettings) {
   checkEnvironmentExists(envSettings);
   File dataPlaneFactDir = new File(
         envSettings.getNxtnetDataPlaneOutputDir());
   if (dataPlaneFactDir.exists()) {
      return;
   }
   throw new CleanBatfishException(
         "Missing computed data plane facts for environment: "
               + envSettings.getName() + "\n");
}
/** Checks data-plane question prerequisites for the default environment. */
private void checkDataPlaneQuestionDependencies() {
   checkDataPlaneQuestionDependencies(_envSettings);
}
/**
 * Checks prerequisites for data-plane questions: configurations must be
 * compiled and the environment's data plane computed.
 */
private void checkDataPlaneQuestionDependencies(
      EnvironmentSettings envSettings) {
   checkConfigurations();
   checkDataPlane(envSettings);
}
/** Verifies that a differential environment is both specified and present. */
private void checkDiffEnvironmentExists() {
   checkDiffEnvironmentSpecified();
   checkEnvironmentExists(_diffEnvSettings);
}
/** Throws if a differential question lacks a differential environment. */
private void checkDiffEnvironmentSpecified() {
   if (_settings.getDiffEnvironmentName() != null) {
      return;
   }
   throw new CleanBatfishException(
         "No differential environment specified for differential question");
}
/**
 * Checks prerequisites for differential data-plane questions: a diff
 * environment must be specified, configurations compiled, and data planes
 * computed for both the base and diff environments.
 */
private void checkDifferentialDataPlaneQuestionDependencies() {
   checkDiffEnvironmentSpecified();
   checkConfigurations();
   checkDataPlane(_baseEnvSettings);
   checkDataPlane(_diffEnvSettings);
}
/**
 * Throws if the environment's directory structure has not been initialized,
 * detected via the parent directory of the data-plane path.
 */
private void checkEnvironmentExists(EnvironmentSettings envSettings) {
   checkBaseDirExists();
   // NOTE(review): getParentFile() returns null for a path with no parent,
   // which would NPE here -- presumably data-plane paths always have one;
   // confirm against how getDataPlanePath() is constructed.
   if (!new File(envSettings.getDataPlanePath()).getParentFile().exists()) {
      throw new CleanBatfishException("Environment not initialized: \""
            + envSettings.getName() + "\"");
   }
}
/**
 * Verifies that the facts required by a query's predicates exist: data-plane
 * facts when any data-plane predicate is named, and traffic facts when any
 * traffic predicate is named.
 */
private void checkQuery(EnvironmentSettings envSettings,
      Set<String> predicateNames) {
   // data-plane predicates referenced by the query
   Set<String> dpIntersect = new HashSet<String>(predicateNames);
   dpIntersect.retainAll(getNxtnetDataPlaneOutputSymbols());
   if (!dpIntersect.isEmpty()) {
      checkDataPlaneFacts(envSettings);
   }
   // traffic predicates referenced by the query
   Set<String> trafficIntersect = new HashSet<String>(predicateNames);
   trafficIntersect.retainAll(getNxtnetTrafficOutputSymbols());
   if (!trafficIntersect.isEmpty()) {
      checkTrafficFacts(envSettings);
   }
}
/**
 * Throws if computed traffic facts are absent for the environment; also
 * requires that the environment itself exists.
 */
private void checkTrafficFacts(EnvironmentSettings envSettings) {
   checkEnvironmentExists(envSettings);
   File trafficFactDir = new File(envSettings.getNxtnetTrafficOutputDir());
   if (trafficFactDir.exists()) {
      return;
   }
   throw new CleanBatfishException(
         "Missing computed traffic facts for environment: "
               + envSettings.getName() + "\n");
}
/**
 * Deletes the temporary logic directory, if one was created. The field is
 * cleared only after a successful delete so a failed cleanup can be retried.
 */
private void cleanupLogicDir() {
   if (_tmpLogicDir == null) {
      return;
   }
   try {
      FileUtils.deleteDirectory(_tmpLogicDir);
   }
   catch (IOException e) {
      throw new BatfishException(
            "Error cleaning up temporary logic directory", e);
   }
   _tmpLogicDir = null;
}
/** No resources to release; present to satisfy {@link AutoCloseable}. */
@Override
public void close() throws Exception {
}
/**
 * Executes the given composite NoD jobs and returns the union of all flows
 * they produce.
 */
private Set<Flow> computeCompositeNodOutput(List<CompositeNodJob> jobs) {
   _logger.info("\n*** EXECUTING COMPOSITE NOD JOBS ***\n");
   resetTimer();
   BatfishJobExecutor<CompositeNodJob, NodJobResult, Set<Flow>> jobExecutor = new BatfishJobExecutor<CompositeNodJob, NodJobResult, Set<Flow>>(
         _settings, _logger);
   Set<Flow> collectedFlows = new TreeSet<Flow>();
   jobExecutor.executeJobs(jobs, collectedFlows);
   printElapsedTime();
   return collectedFlows;
}
/**
 * Generates and dumps the control-plane facts for an environment:
 * precomputed routes/neighbors/advertisements (when enabled), external BGP
 * announcements, topology (with blacklisted edges, nodes, and interfaces
 * removed), configuration facts, and flow sinks. Finally serializes the
 * resulting topology.
 *
 * @param cpFactBins destination fact bins, keyed by fact name
 * @param differentialContext when true, flow sinks are taken from the base
 *           environment's data plane instead of being computed from the
 *           topology
 * @param envSettings environment whose facts are generated
 */
private void computeControlPlaneFacts(Map<String, StringBuilder> cpFactBins,
      boolean differentialContext, EnvironmentSettings envSettings) {
   checkComputeControlPlaneFacts();
   // optionally seed the fact bins with precomputed artifacts
   if (_settings.getUsePrecomputedRoutes()) {
      List<String> precomputedRoutesPaths = _settings
            .getPrecomputedRoutesPaths();
      populatePrecomputedRoutes(precomputedRoutesPaths, cpFactBins);
   }
   if (_settings.getUsePrecomputedIbgpNeighbors()) {
      populatePrecomputedIbgpNeighbors(
            _settings.getPrecomputedIbgpNeighborsPath(), cpFactBins);
   }
   if (_settings.getUsePrecomputedBgpAdvertisements()) {
      populatePrecomputedBgpAdvertisements(
            _settings.getPrecomputedBgpAdvertisementsPath(), cpFactBins);
   }
   Map<String, Configuration> configurations = loadConfigurations(envSettings);
   CommunitySet allCommunities = new CommunitySet();
   processExternalBgpAnnouncements(configurations, envSettings, cpFactBins,
         allCommunities);
   Topology topology = computeTopology(_settings.getTestRigPath(),
         configurations, cpFactBins);
   String edgeBlacklistPath = envSettings.getEdgeBlacklistPath();
   String serializedTopologyPath = envSettings.getSerializedTopologyPath();
   // in a differential context, flow sinks come from the base data plane;
   // otherwise they remain null here and are computed after blacklisting
   InterfaceSet flowSinks = null;
   if (differentialContext) {
      flowSinks = getFlowSinkSet(_baseEnvSettings.getDataPlanePath());
   }
   // remove blacklisted edges from the topology (only when a blacklist
   // file actually exists)
   EdgeSet blacklistEdges = getEdgeBlacklist(envSettings);
   if (edgeBlacklistPath != null) {
      File edgeBlacklistPathAsFile = new File(edgeBlacklistPath);
      if (edgeBlacklistPathAsFile.exists()) {
         EdgeSet edges = topology.getEdges();
         edges.removeAll(blacklistEdges);
      }
   }
   // remove blacklisted nodes from topology and (differential) flow sinks
   NodeSet blacklistNodes = getNodeBlacklist(envSettings);
   if (blacklistNodes != null) {
      if (differentialContext) {
         flowSinks.removeNodes(blacklistNodes);
      }
      for (String blacklistNode : blacklistNodes) {
         topology.removeNode(blacklistNode);
      }
   }
   // remove blacklisted interfaces likewise
   Set<NodeInterfacePair> blacklistInterfaces = getInterfaceBlacklist(envSettings);
   if (blacklistInterfaces != null) {
      for (NodeInterfacePair blacklistInterface : blacklistInterfaces) {
         topology.removeInterface(blacklistInterface);
         if (differentialContext) {
            flowSinks.remove(blacklistInterface);
         }
      }
   }
   if (!differentialContext) {
      flowSinks = computeFlowSinks(configurations, topology);
   }
   writeTopologyFacts(topology, cpFactBins);
   populateConfigurationFactBins(configurations.values(), allCommunities,
         cpFactBins);
   writeFlowSinkFacts(flowSinks, cpFactBins);
   if (!_logger.isActive(BatfishLogger.LEVEL_INFO)) {
      _logger.output("Facts generated successfully.\n");
   }
   if (_settings.getDumpControlPlaneFacts()) {
      dumpControlPlaneFacts(envSettings, cpFactBins);
   }
   // serialize topology
   File topologyPath = new File(serializedTopologyPath);
   _logger.info("Serializing topology...");
   serializeObject(topology, topologyPath);
   _logger.info("OK\n");
}
/** Computes the data plane for the default environment. */
private void computeDataPlane() {
   computeDataPlane(_envSettings);
}
/**
 * Computes the data plane for the given environment, validating that the
 * environment declares an output path first.
 */
private void computeDataPlane(EnvironmentSettings envSettings) {
   checkDataPlaneFacts(envSettings);
   String dataPlanePath = envSettings.getDataPlanePath();
   if (dataPlanePath == null) {
      throw new BatfishException("Missing path to data plane");
   }
   computeDataPlane(new File(dataPlanePath), envSettings);
}
/**
 * Assembles and serializes the data plane for an environment: flow sinks,
 * topology edges, forwarding rules (FIBs), and policy-routing FIBs.
 *
 * @param dataPlanePath file to which the serialized {@link DataPlane} is
 *           written
 * @param envSettings environment whose facts are read
 */
private void computeDataPlane(File dataPlanePath,
      EnvironmentSettings envSettings) {
   _logger.info("\n*** COMPUTING DATA PLANE STRUCTURES ***\n");
   resetTimer();
   _logger.info("Retrieving flow sink information...");
   InterfaceSet flowSinks = getFlowSinkSet(envSettings);
   _logger.info("OK\n");
   Topology topology = loadTopology(envSettings);
   EdgeSet topologyEdges = topology.getEdges();
   // fixed typo in progress message ("Caclulating" -> "Calculating")
   _logger.info("Calculating forwarding rules...");
   FibMap fibs = getRouteForwardingRules(envSettings);
   PolicyRouteFibNodeMap policyRouteFibNodeMap = getPolicyRouteFibNodeMap(envSettings);
   _logger.info("OK\n");
   DataPlane dataPlane = new DataPlane(flowSinks, topologyEdges, fibs,
         policyRouteFibNodeMap);
   _logger.info("Serializing data plane...");
   serializeObject(dataPlane, dataPlanePath);
   _logger.info("OK\n");
   printElapsedTime();
}
/**
 * Computes the set of flow-sink interfaces: active, non-loopback interfaces
 * that do not participate in any topology edge.
 *
 * @param configurations node configurations keyed by hostname
 * @param topology the (post-blacklist) network topology
 * @return interfaces that act as flow sinks
 */
private InterfaceSet computeFlowSinks(
      Map<String, Configuration> configurations, Topology topology) {
   // collect every interface that appears on some topology edge
   InterfaceSet topologyInterfaces = new InterfaceSet();
   for (Edge edge : topology.getEdges()) {
      topologyInterfaces.add(edge.getInterface1());
      topologyInterfaces.add(edge.getInterface2());
   }
   InterfaceSet flowSinks = new InterfaceSet();
   for (Configuration node : configurations.values()) {
      String hostname = node.getHostname();
      for (Interface iface : node.getInterfaces().values()) {
         NodeInterfacePair candidate = new NodeInterfacePair(hostname,
               iface.getName());
         boolean isSink = iface.getActive()
               && !iface.isLoopback(node.getVendor())
               && !topologyInterfaces.contains(candidate);
         if (isSink) {
            flowSinks.add(candidate);
         }
      }
   }
   return flowSinks;
}
/**
 * Executes the given NoD jobs and returns the union of all flows they
 * produce.
 */
private Set<Flow> computeNodOutput(List<NodJob> jobs) {
   _logger.info("\n*** EXECUTING NOD JOBS ***\n");
   resetTimer();
   BatfishJobExecutor<NodJob, NodJobResult, Set<Flow>> jobExecutor = new BatfishJobExecutor<NodJob, NodJobResult, Set<Flow>>(
         _settings, _logger);
   Set<Flow> collectedFlows = new TreeSet<Flow>();
   jobExecutor.executeJobs(jobs, collectedFlows);
   printElapsedTime();
   return collectedFlows;
}
/**
 * Executes the given NoD satisfiability jobs, accumulating per-key
 * satisfiability verdicts into {@code output}.
 *
 * @param <Key> type identifying each satisfiability query
 * @param jobs jobs to execute
 * @param output map populated with each key's satisfiability result
 */
private <Key> void computeNodSatOutput(List<NodSatJob<Key>> jobs,
      Map<Key, Boolean> output) {
   _logger.info("\n*** EXECUTING NOD SAT JOBS ***\n");
   resetTimer();
   BatfishJobExecutor<NodSatJob<Key>, NodSatResult<Key>, Map<Key, Boolean>> executor = new BatfishJobExecutor<NodSatJob<Key>, NodSatResult<Key>, Map<Key, Boolean>>(
         _settings, _logger);
   executor.executeJobs(jobs, output);
   printElapsedTime();
}
/**
 * Builds the network topology for a test rig: reads an explicit topology
 * file when present, otherwise synthesizes adjacencies from interface
 * subnetworks.
 *
 * @param testRigPath test rig directory expected to contain the topology
 *           file
 * @param configurations node configurations used for topology synthesis
 * @param factBins fact bins populated when a topology file is processed
 * @return the resulting topology
 */
public Topology computeTopology(String testRigPath,
      Map<String, Configuration> configurations,
      Map<String, StringBuilder> factBins) {
   Path topologyFilePath = Paths.get(testRigPath, TOPOLOGY_FILENAME);
   if (Files.exists(topologyFilePath)) {
      // Get generated facts from topology file
      return processTopologyFile(topologyFilePath.toFile(), factBins);
   }
   // guess adjacencies based on interface subnetworks
   _logger
         .info("*** (GUESSING TOPOLOGY IN ABSENCE OF EXPLICIT FILE) ***\n");
   EdgeSet guessedEdges = synthesizeTopology(configurations);
   return new Topology(guessedEdges);
}
/**
 * Converts vendor-specific configuration objects into vendor-independent
 * {@link Configuration}s by running one conversion job per host through the
 * job executor.
 *
 * @param vendorConfigurations vendor configs keyed by hostname
 * @return vendor-independent configurations keyed by hostname
 */
private Map<String, Configuration> convertConfigurations(
      Map<String, GenericConfigObject> vendorConfigurations) {
   _logger
         .info("\n*** CONVERTING VENDOR CONFIGURATIONS TO INDEPENDENT FORMAT ***\n");
   resetTimer();
   Map<String, Configuration> configurations = new TreeMap<String, Configuration>();
   List<ConvertConfigurationJob> jobs = new ArrayList<ConvertConfigurationJob>();
   for (String hostname : vendorConfigurations.keySet()) {
      // warnings are recorded only when the corresponding logger level is
      // active; each category can independently be escalated to an error
      Warnings warnings = new Warnings(_settings.getPedanticAsError(),
            _settings.getPedanticRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_PEDANTIC),
            _settings.getRedFlagAsError(), _settings.getRedFlagRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_REDFLAG),
            _settings.getUnimplementedAsError(),
            _settings.getUnimplementedRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_UNIMPLEMENTED),
            _settings.printParseTree());
      GenericConfigObject vc = vendorConfigurations.get(hostname);
      ConvertConfigurationJob job = new ConvertConfigurationJob(_settings,
            vc, hostname, warnings);
      jobs.add(job);
   }
   BatfishJobExecutor<ConvertConfigurationJob, ConvertConfigurationResult, Map<String, Configuration>> executor = new BatfishJobExecutor<ConvertConfigurationJob, ConvertConfigurationResult, Map<String, Configuration>>(
         _settings, _logger);
   executor.executeJobs(jobs, configurations);
   printElapsedTime();
   return configurations;
}
/**
 * Returns whether a computed data plane exists for the environment;
 * configurations must already be compiled.
 */
private boolean dataPlaneDependenciesExist(EnvironmentSettings envSettings) {
   checkConfigurations();
   return new File(envSettings.getDataPlanePath()).exists();
}
/**
 * Deserializes all vendor-independent {@link Configuration} objects found
 * in the given directory, keyed by file name.
 *
 * @param serializedConfigPath directory containing serialized configs
 * @return configurations keyed by file name
 * @throws BatfishException if the directory cannot be listed
 */
public Map<String, Configuration> deserializeConfigurations(
      String serializedConfigPath) {
   _logger
         .info("\n*** DESERIALIZING VENDOR-INDEPENDENT CONFIGURATION STRUCTURES ***\n");
   resetTimer();
   File dir = new File(serializedConfigPath);
   File[] serializedConfigs = dir.listFiles();
   if (serializedConfigs == null) {
      throw new BatfishException(
            "Error reading vendor-independent configs directory: \""
                  + dir.toString() + "\"");
   }
   Map<String, Configuration> configurations = new TreeMap<String, Configuration>();
   for (File serializedConfig : serializedConfigs) {
      _logger.debug("Reading config: \"" + serializedConfig + "\"");
      Configuration c = (Configuration) deserializeObject(serializedConfig);
      configurations.put(serializedConfig.getName(), c);
      _logger.debug(" ...OK\n");
   }
   printElapsedTime();
   return configurations;
}
/**
 * Deserializes a single object from the given file, auto-detecting whether
 * the contents are native Java serialization data or XStream XML.
 *
 * @param inputFile file containing one serialized object
 * @return the deserialized object
 * @throws BatfishException if the file cannot be read or its contents
 *            cannot be deserialized
 */
private Object deserializeObject(File inputFile) {
   // try-with-resources guarantees the streams are closed even when
   // readObject() throws; the previous version leaked them on failure
   try (FileInputStream fis = new FileInputStream(inputFile);
         ObjectInputStream ois = isJavaSerializationData(inputFile)
               ? new ObjectInputStream(fis)
               : new XStream(new DomDriver("UTF-8"))
                     .createObjectInputStream(fis)) {
      return ois.readObject();
   }
   catch (IOException | ClassNotFoundException e) {
      throw new BatfishException("Failed to deserialize object from file: "
            + inputFile.toString(), e);
   }
}
/**
 * Reads every serialized vendor configuration from the given directory and
 * returns them keyed by file name.
 *
 * @throws BatfishException if the directory cannot be listed
 */
public Map<String, GenericConfigObject> deserializeVendorConfigurations(
      String serializedVendorConfigPath) {
   _logger.info("\n*** DESERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n");
   resetTimer();
   Map<String, GenericConfigObject> vendorConfigurations = new TreeMap<String, GenericConfigObject>();
   File configDir = new File(serializedVendorConfigPath);
   File[] configFiles = configDir.listFiles();
   if (configFiles == null) {
      throw new BatfishException("Error reading vendor configs directory");
   }
   for (File configFile : configFiles) {
      _logger.info("Reading vendor config: \"" + configFile + "\"");
      GenericConfigObject vc = (GenericConfigObject) deserializeObject(configFile);
      vendorConfigurations.put(configFile.getName(), vc);
      _logger.info("...OK\n");
   }
   printElapsedTime();
   return vendorConfigurations;
}
/**
 * Deactivates the bind interface of every IPsec VPN whose remote endpoint
 * could not be resolved, so that unusable tunnels do not appear up.
 */
private void disableUnusableVpnInterfaces(
      Map<String, Configuration> configurations,
      EnvironmentSettings envSettings) {
   Environment environment = new Environment();
   environment.setConfigurations(configurations);
   // resolves remote endpoints for all IPsec VPNs
   environment.initRemoteIpsecVpns();
   for (Configuration c : configurations.values()) {
      for (IpsecVpn vpn : c.getIpsecVpns().values()) {
         if (vpn.getRemoteIpsecVpn() != null) {
            continue;
         }
         Interface bindInterface = vpn.getBindInterface();
         if (bindInterface == null) {
            continue;
         }
         bindInterface.setActive(false);
         _logger
               .warnf(
                     "WARNING: Disabling unusable vpn interface because we cannot determine remote endpoint: \"%s:%s\"\n",
                     c.getHostname(), bindInterface.getName());
      }
   }
}
/**
 * Dumps the accumulated control-plane fact bins into the given
 * environment's control-plane facts directory.
 */
private void dumpControlPlaneFacts(EnvironmentSettings envSettings,
      Map<String, StringBuilder> factBins) {
   _logger.info("\n*** DUMPING CONTROL PLANE FACTS ***\n");
   dumpFacts(factBins, envSettings.getControlPlaneFactsDir());
}
/**
 * Writes each fact bin to a file named after the bin under
 * {@code factsDir}. The first line of every bin is treated as a header and
 * preserved verbatim; the remaining lines are deduplicated and sorted.
 *
 * Improvements: iterates {@code entrySet()} instead of keySet()+get(), and
 * avoids building throwaway concatenated strings inside append().
 *
 * @throws BatfishException on any I/O failure
 */
private void dumpFacts(Map<String, StringBuilder> factBins, String factsDir) {
   resetTimer();
   Path factsDirPath = Paths.get(factsDir);
   try {
      Files.createDirectories(factsDirPath);
      for (Entry<String, StringBuilder> factBin : factBins.entrySet()) {
         String factsFilename = factBin.getKey();
         String[] factsLines = factBin.getValue().toString().split("\n");
         // TreeSet both deduplicates and sorts the fact lines
         Set<String> uniqueFacts = new TreeSet<String>();
         for (int i = 1; i < factsLines.length; i++) {
            uniqueFacts.add(factsLines[i]);
         }
         StringBuilder factsBuilder = new StringBuilder();
         // header line is written first, untouched
         factsBuilder.append(factsLines[0]).append("\n");
         for (String factsLine : uniqueFacts) {
            factsBuilder.append(factsLine).append("\n");
         }
         String facts = factsBuilder.toString();
         Path factsFilePath = factsDirPath.resolve(factsFilename);
         _logger.info("Writing: \""
               + factsFilePath.toAbsolutePath().toString() + "\"\n");
         FileUtils.write(factsFilePath.toFile(), facts);
      }
   }
   catch (IOException e) {
      throw new BatfishException("Failed to write fact dump file(s)", e);
   }
   printElapsedTime();
}
/** Dumps traffic facts using the ambient environment settings. */
private void dumpTrafficFacts(Map<String, StringBuilder> factBins) {
   dumpTrafficFacts(factBins, _envSettings);
}
/**
 * Dumps the accumulated traffic fact bins into the given environment's
 * traffic facts directory.
 */
private void dumpTrafficFacts(Map<String, StringBuilder> factBins,
      EnvironmentSettings envSettings) {
   _logger.info("\n*** DUMPING TRAFFIC FACTS ***\n");
   dumpFacts(factBins, envSettings.getTrafficFactsDir());
}
/**
 * Returns true iff the environment's directory (parent of the data-plane
 * path) exists on disk. Requires the base directory to exist.
 */
private boolean environmentExists(EnvironmentSettings envSettings) {
   checkBaseDirExists();
   File dataPlaneFile = new File(envSettings.getDataPlanePath());
   return dataPlaneFile.getParentFile().exists();
}
/**
 * Flattens all hierarchical vendor configurations in the test rig at
 * {@code inputPath} via parallel jobs and writes the flattened configs —
 * plus the topology file, if present — under {@code outputPath}.
 *
 * @throws BatfishException if the input configs cannot be listed or the
 *            output directory cannot be created
 */
private void flatten(String inputPath, String outputPath) {
   Map<File, String> configurationData = readConfigurationFiles(inputPath,
         BfConsts.RELPATH_CONFIGURATIONS_DIR);
   Map<File, String> outputConfigurationData = new TreeMap<File, String>();
   File inputFolder = new File(inputPath);
   // sanity check that the input test rig directory is readable
   File[] configs = inputFolder.listFiles();
   if (configs == null) {
      throw new BatfishException("Error reading configs from input test rig");
   }
   try {
      Files.createDirectories(Paths.get(outputPath));
   }
   catch (IOException e) {
      throw new BatfishException(
            "Could not create output testrig directory", e);
   }
   _logger.info("\n*** FLATTENING TEST RIG ***\n");
   resetTimer();
   List<FlattenVendorConfigurationJob> jobs = new ArrayList<FlattenVendorConfigurationJob>();
   for (File inputFile : configurationData.keySet()) {
      // warning behavior (record vs. fail) is driven by settings and the
      // active log level
      Warnings warnings = new Warnings(_settings.getPedanticAsError(),
            _settings.getPedanticRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_PEDANTIC),
            _settings.getRedFlagAsError(), _settings.getRedFlagRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_REDFLAG),
            _settings.getUnimplementedAsError(),
            _settings.getUnimplementedRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_UNIMPLEMENTED),
            _settings.printParseTree());
      String fileText = configurationData.get(inputFile);
      String name = inputFile.getName();
      File outputFile = Paths.get(outputPath,
            BfConsts.RELPATH_CONFIGURATIONS_DIR, name).toFile();
      FlattenVendorConfigurationJob job = new FlattenVendorConfigurationJob(
            _settings, fileText, inputFile, outputFile, warnings);
      jobs.add(job);
   }
   BatfishJobExecutor<FlattenVendorConfigurationJob, FlattenVendorConfigurationResult, Map<File, String>> executor = new BatfishJobExecutor<FlattenVendorConfigurationJob, FlattenVendorConfigurationResult, Map<File, String>>(
         _settings, _logger);
   executor.executeJobs(jobs, outputConfigurationData);
   printElapsedTime();
   // write each flattened config to its target file
   for (Entry<File, String> e : outputConfigurationData.entrySet()) {
      File outputFile = e.getKey();
      String flatConfigText = e.getValue();
      String outputFileAsString = outputFile.toString();
      _logger.debug("Writing config to \"" + outputFileAsString + "\"...");
      Util.writeFile(outputFileAsString, flatConfigText);
      _logger.debug("OK\n");
   }
   // copy the topology file verbatim, if the input test rig has one
   Path inputTopologyPath = Paths.get(inputPath, TOPOLOGY_FILENAME);
   Path outputTopologyPath = Paths.get(outputPath, TOPOLOGY_FILENAME);
   if (Files.isRegularFile(inputTopologyPath)) {
      String topologyFileText = Util.readFile(inputTopologyPath.toFile());
      Util.writeFile(outputTopologyPath.toString(), topologyFileText);
   }
}
/**
 * Generates synthetic single-area OSPF configurations for every node in
 * the given topology file and serializes them to {@code outputPath}. Each
 * group of mutually-connected interfaces receives the smallest subnet that
 * can hold it, allocated sequentially from GEN_OSPF_STARTING_IP.
 */
private void generateOspfConfigs(String topologyPath, String outputPath) {
   File topologyFilePath = new File(topologyPath);
   Topology topology = parseTopology(topologyFilePath);
   Map<String, Configuration> configs = new TreeMap<String, Configuration>();
   NodeSet allNodes = new NodeSet();
   Map<NodeInterfacePair, Set<NodeInterfacePair>> interfaceMap = new HashMap<NodeInterfacePair, Set<NodeInterfacePair>>();
   // first we collect set of all mentioned nodes, and build mapping from
   // each interface to the set of interfaces that connect to each other
   for (Edge edge : topology.getEdges()) {
      allNodes.add(edge.getNode1());
      allNodes.add(edge.getNode2());
      NodeInterfacePair interface1 = new NodeInterfacePair(edge.getNode1(),
            edge.getInt1());
      NodeInterfacePair interface2 = new NodeInterfacePair(edge.getNode2(),
            edge.getInt2());
      // both endpoints share one set; either endpoint may already have one
      Set<NodeInterfacePair> interfaceSet = interfaceMap.get(interface1);
      if (interfaceSet == null) {
         interfaceSet = new HashSet<NodeInterfacePair>();
      }
      interfaceMap.put(interface1, interfaceSet);
      interfaceMap.put(interface2, interfaceSet);
      interfaceSet.add(interface1);
      interfaceSet.add(interface2);
   }
   // then we create configs for every mentioned node
   for (String hostname : allNodes) {
      Configuration config = new Configuration(hostname);
      configs.put(hostname, config);
   }
   // Now we create interfaces for each edge and record the number of
   // neighbors so we know how large to make the subnet
   long currentStartingIpAsLong = new Ip(GEN_OSPF_STARTING_IP).asLong();
   Set<Set<NodeInterfacePair>> interfaceSets = new HashSet<Set<NodeInterfacePair>>();
   interfaceSets.addAll(interfaceMap.values());
   for (Set<NodeInterfacePair> interfaceSet : interfaceSets) {
      int numInterfaces = interfaceSet.size();
      if (numInterfaces < 2) {
         throw new BatfishException(
               "The following interface set contains less than two interfaces: "
                     + interfaceSet.toString());
      }
      // numHostBits = ceil(log2(numInterfaces)): count bits needed to
      // address all members of this set
      int numHostBits = 0;
      for (int shiftedValue = numInterfaces - 1; shiftedValue != 0; shiftedValue >>= 1, numHostBits++) {
      }
      int subnetBits = 32 - numHostBits;
      int offset = 0;
      for (NodeInterfacePair currentPair : interfaceSet) {
         // assign consecutive host addresses within the allocated subnet
         Ip ip = new Ip(currentStartingIpAsLong + offset);
         Prefix prefix = new Prefix(ip, subnetBits);
         String ifaceName = currentPair.getInterface();
         Interface iface = new Interface(ifaceName, configs.get(currentPair
               .getHostname()));
         iface.setPrefix(prefix);
         // dirty hack for setting bandwidth for now
         double ciscoBandwidth = org.batfish.representation.cisco.Interface
               .getDefaultBandwidth(ifaceName);
         double juniperBandwidth = org.batfish.representation.juniper.Interface
               .getDefaultBandwidthByName(ifaceName);
         double bandwidth = Math.min(ciscoBandwidth, juniperBandwidth);
         iface.setBandwidth(bandwidth);
         String hostname = currentPair.getHostname();
         Configuration config = configs.get(hostname);
         config.getInterfaces().put(ifaceName, iface);
         offset++;
      }
      // advance past the whole subnet just consumed
      currentStartingIpAsLong += (1 << numHostBits);
   }
   for (Configuration config : configs.values()) {
      // use cisco arbitrarily
      config.setVendor(ConfigurationFormat.CISCO);
      OspfProcess proc = new OspfProcess();
      config.setOspfProcess(proc);
      proc.setReferenceBandwidth(org.batfish.representation.cisco.OspfProcess.DEFAULT_REFERENCE_BANDWIDTH);
      // single backbone area (0) containing every interface
      long backboneArea = 0;
      OspfArea area = new OspfArea(backboneArea);
      proc.getAreas().put(backboneArea, area);
      area.getInterfaces().addAll(config.getInterfaces().values());
   }
   serializeIndependentConfigs(configs, outputPath);
}
/**
 * Generates stub-router configurations for every BGP neighbor with AS
 * {@code stubAs} of each node carrying role {@code inputRole}. The stub's
 * hostname is extracted from the description of the interface facing it
 * using capture group 1 of {@code interfaceDescriptionRegex}. Updated
 * node-role mappings and the generated stub configs are serialized to
 * disk.
 *
 * @throws BatfishException on dynamic BGP sessions, hostname collisions
 *            with non-generated nodes, or unmatchable interface
 *            descriptions
 */
private void generateStubs(String inputRole, int stubAs,
      String interfaceDescriptionRegex) {
   Map<String, Configuration> configs = loadConfigurations();
   Pattern pattern = Pattern.compile(interfaceDescriptionRegex);
   Map<String, Configuration> stubConfigurations = new TreeMap<String, Configuration>();
   _logger.info("\n*** GENERATING STUBS ***\n");
   resetTimer();
   // load old node-roles to be updated at end
   RoleSet stubRoles = new RoleSet();
   stubRoles.add(STUB_ROLE);
   File nodeRolesPath = new File(_settings.getNodeRolesPath());
   _logger.info("Deserializing old node-roles mappings: \"" + nodeRolesPath
         + "\" ...");
   NodeRoleMap nodeRoles = (NodeRoleMap) deserializeObject(nodeRolesPath);
   _logger.info("OK\n");
   // create origination policy common to all stubs
   String stubOriginationPolicyName = "~STUB_ORIGINATION_POLICY~";
   PolicyMap stubOriginationPolicy = new PolicyMap(stubOriginationPolicyName);
   PolicyMapClause clause = new PolicyMapClause();
   stubOriginationPolicy.getClauses().add(clause);
   String stubOriginationRouteFilterListName = "~STUB_ORIGINATION_ROUTE_FILTER~";
   RouteFilterList rf = new RouteFilterList(
         stubOriginationRouteFilterListName);
   // match only the default route (0.0.0.0/0 exactly)
   RouteFilterLine rfl = new RouteFilterLine(LineAction.ACCEPT, Prefix.ZERO,
         new SubRange(0, 0));
   rf.addLine(rfl);
   PolicyMapMatchRouteFilterListLine matchLine = new PolicyMapMatchRouteFilterListLine(
         Collections.singleton(rf));
   clause.getMatchLines().add(matchLine);
   clause.setAction(PolicyMapAction.PERMIT);
   Set<String> skipWarningNodes = new HashSet<String>();
   for (Configuration config : configs.values()) {
      if (!config.getRoles().contains(inputRole)) {
         continue;
      }
      for (BgpNeighbor neighbor : config.getBgpProcess().getNeighbors()
            .values()) {
         if (!neighbor.getRemoteAs().equals(stubAs)) {
            continue;
         }
         Prefix neighborPrefix = neighbor.getPrefix();
         if (neighborPrefix.getPrefixLength() != 32) {
            throw new BatfishException(
                  "do not currently handle generating stubs based on dynamic bgp sessions");
         }
         Ip neighborAddress = neighborPrefix.getAddress();
         int edgeAs = neighbor.getLocalAs();
         /*
          * Now that we have the ip address of the stub, we want to find the
          * interface that connects to it. We will extract the hostname for
          * the stub from the description of this interface using the
          * supplied regex.
          */
         boolean found = false;
         for (Interface iface : config.getInterfaces().values()) {
            Prefix prefix = iface.getPrefix();
            if (prefix == null || !prefix.contains(neighborAddress)) {
               continue;
            }
            // the neighbor address falls within the network assigned to this
            // interface, so now we check the description
            String description = iface.getDescription();
            Matcher matcher = pattern.matcher(description);
            if (matcher.find()) {
               String hostname = matcher.group(1);
               if (configs.containsKey(hostname)) {
                  // hostname collision: allowed only if the existing node is
                  // itself a previously generated stub
                  Configuration duplicateConfig = configs.get(hostname);
                  if (!duplicateConfig.getRoles().contains(STUB_ROLE)
                        || duplicateConfig.getRoles().size() != 1) {
                     throw new BatfishException(
                           "A non-generated node with hostname: \""
                                 + hostname
                                 + "\" already exists in network under analysis");
                  }
                  else {
                     if (!skipWarningNodes.contains(hostname)) {
                        _logger
                              .warn("WARNING: Overwriting previously generated node: \""
                                    + hostname + "\"\n");
                        skipWarningNodes.add(hostname);
                     }
                  }
               }
               found = true;
               Configuration stub = stubConfigurations.get(hostname);
               // create stub if it doesn't exist yet
               if (stub == null) {
                  stub = new Configuration(hostname);
                  stubConfigurations.put(hostname, stub);
                  // create flow sink interface for stub with common details
                  String flowSinkName = "TenGibabitEthernet100/100";
                  Interface flowSink = new Interface(flowSinkName, stub);
                  flowSink.setPrefix(Prefix.ZERO);
                  flowSink.setActive(true);
                  flowSink.setBandwidth(10E9d);
                  stub.getInterfaces().put(flowSinkName, flowSink);
                  stub.setBgpProcess(new BgpProcess());
                  stub.getPolicyMaps().put(stubOriginationPolicyName,
                        stubOriginationPolicy);
                  stub.getRouteFilterLists().put(
                        stubOriginationRouteFilterListName, rf);
                  stub.setVendor(ConfigurationFormat.CISCO);
                  stub.setRoles(stubRoles);
                  nodeRoles.put(hostname, stubRoles);
               }
               // create the interface on which peering will occur
               Map<String, Interface> stubInterfaces = stub.getInterfaces();
               String stubInterfaceName = "TenGigabitEthernet0/"
                     + (stubInterfaces.size() - 1);
               Interface stubInterface = new Interface(stubInterfaceName,
                     stub);
               stubInterfaces.put(stubInterfaceName, stubInterface);
               stubInterface.setPrefix(new Prefix(neighborAddress, prefix
                     .getPrefixLength()));
               stubInterface.setActive(true);
               stubInterface.setBandwidth(10E9d);
               // create neighbor within bgp process
               BgpNeighbor edgeNeighbor = new BgpNeighbor(prefix, stub);
               edgeNeighbor.getOriginationPolicies().add(
                     stubOriginationPolicy);
               edgeNeighbor.setRemoteAs(edgeAs);
               edgeNeighbor.setLocalAs(stubAs);
               edgeNeighbor.setSendCommunity(true);
               edgeNeighbor.setDefaultMetric(0);
               stub.getBgpProcess().getNeighbors()
                     .put(edgeNeighbor.getPrefix(), edgeNeighbor);
               break;
            }
            else {
               throw new BatfishException(
                     "Unable to derive stub hostname from interface description: \""
                           + description + "\" using regex: \""
                           + interfaceDescriptionRegex + "\"");
            }
         }
         if (!found) {
            throw new BatfishException(
                  "Could not determine stub hostname corresponding to ip: \""
                        + neighborAddress.toString()
                        + "\" listed as neighbor on router: \""
                        + config.getHostname() + "\"");
         }
      }
   }
   // write updated node-roles mappings to disk
   _logger.info("Serializing updated node-roles mappings: \""
         + nodeRolesPath + "\" ...");
   serializeObject(nodeRoles, nodeRolesPath);
   _logger.info("OK\n");
   printElapsedTime();
   // write stubs to disk
   serializeIndependentConfigs(stubConfigurations,
         _settings.getSerializeIndependentPath());
}
/**
 * Retrieves the set of BGP advertisements computed for the given
 * environment from nxtnet output.
 *
 * Fix: validate the environment actually being queried rather than the
 * ambient {@code _envSettings} (the parameter was previously ignored;
 * consistent with getRoutes).
 */
private AdvertisementSet getAdvertisements(EnvironmentSettings envSettings) {
   checkDataPlaneFacts(envSettings);
   AdvertisementSet adverts = new AdvertisementSet();
   EntityTable entityTable = initEntityTable(envSettings);
   Relation relation = getRelation(envSettings,
         BGP_ADVERTISEMENT_PREDICATE_NAME);
   List<BgpAdvertisement> advertList = relation.getColumns().get(0)
         .asBgpAdvertisementList(entityTable);
   adverts.addAll(advertList);
   return adverts;
}
/**
 * Deserializes vendor configurations from the given path and converts
 * them to vendor-independent configurations.
 */
public Map<String, Configuration> getConfigurations(
      String serializedVendorConfigPath) {
   Map<String, GenericConfigObject> vendorConfigurations = deserializeVendorConfigurations(serializedVendorConfigPath);
   return convertConfigurations(vendorConfigurations);
}
/**
 * Parses and converts the delta configurations of the given environment,
 * returning an empty map when no delta configuration directory is
 * configured or present on disk.
 */
private Map<String, Configuration> getDeltaConfigurations(
      EnvironmentSettings envSettings) {
   String deltaConfigurationsDir = envSettings.getDeltaConfigurationsDir();
   if (deltaConfigurationsDir == null) {
      return Collections.<String, Configuration> emptyMap();
   }
   File deltaDir = new File(deltaConfigurationsDir);
   if (!deltaDir.exists()) {
      return Collections.<String, Configuration> emptyMap();
   }
   File configParentDir = deltaDir.getParentFile();
   Map<File, String> deltaConfigsText = readConfigurationFiles(
         configParentDir.toString(), BfConsts.RELPATH_CONFIGURATIONS_DIR);
   Map<String, VendorConfiguration> vendorDeltaConfigs = parseVendorConfigurations(deltaConfigsText);
   // widen the value type for conversion
   Map<String, GenericConfigObject> castedConfigs = new HashMap<String, GenericConfigObject>();
   for (Entry<String, VendorConfiguration> e : vendorDeltaConfigs.entrySet()) {
      castedConfigs.put(e.getKey(), e.getValue());
   }
   return convertConfigurations(castedConfigs);
}
/** Builds the "question:baseEnv:diffEnv" tag for differential flows. */
private String getDifferentialFlowTag() {
   String questionName = _settings.getQuestionName();
   String baseEnvName = _baseEnvSettings.getName();
   String diffEnvName = _diffEnvSettings.getName();
   return questionName + ":" + baseEnvName + ":" + diffEnvName;
}
/**
 * Returns the blacklisted edges for the given environment, or null when no
 * edge-blacklist file is configured or present.
 */
private EdgeSet getEdgeBlacklist(EnvironmentSettings envSettings) {
   String edgeBlacklistPath = envSettings.getEdgeBlacklistPath();
   if (edgeBlacklistPath == null) {
      return null;
   }
   File edgeBlacklistFile = new File(edgeBlacklistPath);
   if (!edgeBlacklistFile.exists()) {
      return null;
   }
   return parseTopology(edgeBlacklistFile).getEdges();
}
/** Returns seconds elapsed (wall clock) since the given epoch-millis timestamp. */
private double getElapsedTime(long beforeTime) {
   return (System.currentTimeMillis() - beforeTime) / 1000d;
}
/**
 * Builds the set of flow-sink interfaces for the given environment from
 * the flow-sink relation (column 0: node, column 1: interface).
 */
private InterfaceSet getFlowSinkSet(EnvironmentSettings envSettings) {
   InterfaceSet flowSinks = new InterfaceSet();
   Relation relation = getRelation(envSettings, FLOW_SINK_PREDICATE_NAME);
   List<String> nodeColumn = relation.getColumns().get(0).asStringList();
   List<String> ifaceColumn = relation.getColumns().get(1).asStringList();
   int numRows = nodeColumn.size();
   for (int row = 0; row < numRows; row++) {
      flowSinks.add(new NodeInterfacePair(nodeColumn.get(row),
            ifaceColumn.get(row)));
   }
   return flowSinks;
}
/** Deserializes the data plane at the given path and returns its flow sinks. */
private InterfaceSet getFlowSinkSet(String dataPlanePath) {
   _logger.info("Deserializing data plane: \"" + dataPlanePath + "\"...");
   DataPlane dataPlane = (DataPlane) deserializeObject(new File(
         dataPlanePath));
   _logger.info("OK\n");
   return dataPlane.getFlowSinks();
}
/** Builds the "question:environment" tag for non-differential flows. */
private String getFlowTag() {
   String questionName = _settings.getQuestionName();
   String envName = _envSettings.getName();
   return questionName + ":" + envName;
}
/**
 * Returns the sorted list of predicates to display help for: the
 * predicates requested in settings, or all known predicates when none were
 * requested.
 *
 * Fix: removed a stray no-op call to _settings.getHelpPredicates() whose
 * result was discarded.
 */
private List<String> getHelpPredicates(Map<String, String> predicateSemantics) {
   Set<String> helpPredicateSet = new LinkedHashSet<String>();
   if (_settings.getHelpPredicates() == null) {
      helpPredicateSet.addAll(predicateSemantics.keySet());
   }
   else {
      helpPredicateSet.addAll(_settings.getHelpPredicates());
   }
   List<String> helpPredicates = new ArrayList<String>(helpPredicateSet);
   Collections.sort(helpPredicates);
   return helpPredicates;
}
/**
 * Collects flow-trace history from nxtnet traffic output — from both base
 * and delta environments for a differential question — prints it, and
 * optionally writes a JSON rendering to the configured answer path.
 */
private void getHistory() {
   FlowHistory flowHistory = new FlowHistory();
   if (_settings.getDiffQuestion()) {
      checkTrafficFacts(_baseEnvSettings);
      checkTrafficFacts(_diffEnvSettings);
      String tag = getDifferentialFlowTag();
      populateFlowHistory(flowHistory, _baseEnvSettings,
            _baseEnvSettings.getName(), tag);
      populateFlowHistory(flowHistory, _diffEnvSettings,
            _diffEnvSettings.getName(), tag);
   }
   else {
      checkTrafficFacts(_envSettings);
      String tag = getFlowTag();
      populateFlowHistory(flowHistory, _envSettings, _envSettings.getName(),
            tag);
   }
   // optionally serialize the answer as JSON
   String jsonOutputPath = _settings.getAnswerJsonPath();
   if (jsonOutputPath != null) {
      String jsonOutput;
      if (_settings.getDiffQuestion()) {
         Map<String, Configuration> baseConfigurations = loadConfigurations(_baseEnvSettings);
         Map<String, Configuration> diffConfigurations = loadConfigurations(_diffEnvSettings);
         jsonOutput = flowHistory.toJsonString(_settings.getQuestionName(),
               baseConfigurations, _baseEnvSettings.getName(),
               diffConfigurations, _diffEnvSettings.getName());
      }
      else {
         Map<String, Configuration> configurations = loadConfigurations();
         jsonOutput = flowHistory.toJsonString(_settings.getQuestionName(),
               configurations, _envSettings.getName(), null, null);
      }
      Util.writeFile(jsonOutputPath, jsonOutput);
   }
   _logger.output(flowHistory.toString());
}
/** Convenience overload using the ambient environment settings. */
private IbgpTopology getIbgpNeighbors() {
   return getIbgpNeighbors(_envSettings);
}
/**
 * Builds the iBGP neighbor topology for the given environment from the
 * iBGP-neighbors relation (columns: node1, ip1, node2, ip2).
 *
 * Fix: validate the environment actually being queried rather than the
 * ambient {@code _envSettings} (the parameter was previously ignored;
 * consistent with getRoutes).
 */
private IbgpTopology getIbgpNeighbors(EnvironmentSettings envSettings) {
   checkDataPlaneFacts(envSettings);
   IbgpTopology topology = new IbgpTopology();
   Relation relation = getRelation(envSettings,
         IBGP_NEIGHBORS_PREDICATE_NAME);
   List<String> node1List = relation.getColumns().get(0).asStringList();
   List<Ip> ip1List = relation.getColumns().get(1).asIpList();
   List<String> node2List = relation.getColumns().get(2).asStringList();
   List<Ip> ip2List = relation.getColumns().get(3).asIpList();
   int numEntries = node1List.size();
   for (int i = 0; i < numEntries; i++) {
      NodeIpPair p1 = new NodeIpPair(node1List.get(i), ip1List.get(i));
      NodeIpPair p2 = new NodeIpPair(node2List.get(i), ip2List.get(i));
      topology.add(new IpEdge(p1, p2));
   }
   return topology;
}
/**
 * Returns the blacklisted interfaces for the given environment, or null
 * when no interface-blacklist file is configured or present.
 */
private Set<NodeInterfacePair> getInterfaceBlacklist(
      EnvironmentSettings envSettings) {
   String interfaceBlacklistPath = envSettings.getInterfaceBlacklistPath();
   if (interfaceBlacklistPath == null) {
      return null;
   }
   File interfaceBlacklistFile = new File(interfaceBlacklistPath);
   if (!interfaceBlacklistFile.exists()) {
      return null;
   }
   return parseInterfaceBlacklist(interfaceBlacklistFile);
}
/**
 * Returns the blacklisted nodes for the given environment, or null when no
 * node-blacklist file is configured or present.
 */
private NodeSet getNodeBlacklist(EnvironmentSettings envSettings) {
   String nodeBlacklistPath = envSettings.getNodeBlacklistPath();
   if (nodeBlacklistPath == null) {
      return null;
   }
   File nodeBlacklistFile = new File(nodeBlacklistPath);
   if (!nodeBlacklistFile.exists()) {
      return null;
   }
   return parseNodeBlacklist(nodeBlacklistFile);
}
/**
 * Returns the data-plane output symbols, including the debug symbols when
 * debug output is enabled in settings.
 */
private Set<String> getNxtnetDataPlaneOutputSymbols() {
   Set<String> symbols = new HashSet<String>(
         NxtnetConstants.NXTNET_DATA_PLANE_OUTPUT_SYMBOLS);
   if (_settings.getNxtnetDebugSymbols()) {
      symbols.addAll(NxtnetConstants.NXTNET_DATA_PLANE_OUTPUT_DEBUG_SYMBOLS);
   }
   return symbols;
}
/**
 * Recursively collects the paths of all nxtnet logic files (*.pl) under
 * {@code logicDir}, returned sorted lexicographically.
 *
 * Fix: corrected typo in exception message ("retreive" -> "retrieve").
 */
private String[] getNxtnetLogicFilenames(File logicDir) {
   final Set<String> filenames = new TreeSet<String>();
   Path logicDirPath = Paths.get(logicDir.toString());
   FileVisitor<Path> nxtnetLogicFileCollector = new SimpleFileVisitor<Path>() {
      @Override
      public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
            throws IOException {
         String fileStr = file.toString();
         if (fileStr.endsWith(".pl")) {
            filenames.add(fileStr);
         }
         return FileVisitResult.CONTINUE;
      }
   };
   try {
      Files.walkFileTree(logicDirPath, nxtnetLogicFileCollector);
   }
   catch (IOException e) {
      throw new BatfishException("failed to retrieve nxtnet logic files", e);
   }
   return filenames.toArray(new String[0]);
}
/**
 * Reads the nxtnet output text of the named relation for the given
 * environment, selecting the data-plane or traffic output directory
 * according to which symbol set the relation belongs to.
 *
 * @throws BatfishException if the relation is not a known output symbol
 */
private String getNxtnetText(EnvironmentSettings envSettings,
      String relationName) {
   String outputDir;
   // data-plane symbols are checked first; their directory wins
   if (getNxtnetDataPlaneOutputSymbols().contains(relationName)) {
      outputDir = envSettings.getNxtnetDataPlaneOutputDir();
   }
   else if (getNxtnetTrafficOutputSymbols().contains(relationName)) {
      outputDir = envSettings.getNxtnetTrafficOutputDir();
   }
   else {
      throw new BatfishException("Predicate: \"" + relationName
            + "\" not an output symbol");
   }
   return getNxtnetText(outputDir, relationName);
}
/** Reads the file for the named relation from the given output directory. */
private String getNxtnetText(String nxtnetOutputDir, String relationName) {
   return Util.readFile(Paths.get(nxtnetOutputDir, relationName).toFile());
}
/**
 * Returns the traffic output symbols, including the debug symbols when
 * debug output is enabled in settings.
 */
private Set<String> getNxtnetTrafficOutputSymbols() {
   Set<String> symbols = new HashSet<String>(
         NxtnetConstants.NXTNET_TRAFFIC_OUTPUT_SYMBOLS);
   if (_settings.getNxtnetDebugSymbols()) {
      symbols.addAll(NxtnetConstants.NXTNET_TRAFFIC_OUTPUT_DEBUG_SYMBOLS);
   }
   return symbols;
}
/**
 * Builds the policy-route FIB map (out-node -> ip -> edge set) from the
 * corresponding nxtnet relation. Column layout: out-node, ip,
 * out-interface, in-node, in-interface.
 */
private PolicyRouteFibNodeMap getPolicyRouteFibNodeMap(
      EnvironmentSettings envSettings) {
   PolicyRouteFibNodeMap nodeMap = new PolicyRouteFibNodeMap();
   Relation relation = getRelation(envSettings,
         FIB_POLICY_ROUTE_NEXT_HOP_PREDICATE_NAME);
   List<String> outNodes = relation.getColumns().get(0).asStringList();
   List<Ip> ips = relation.getColumns().get(1).asIpList();
   List<String> outIfaces = relation.getColumns().get(2).asStringList();
   List<String> inNodes = relation.getColumns().get(3).asStringList();
   List<String> inIfaces = relation.getColumns().get(4).asStringList();
   int numRows = outNodes.size();
   for (int row = 0; row < numRows; row++) {
      String nodeOut = outNodes.get(row);
      // get-or-create the per-node ip map
      PolicyRouteFibIpMap ipMap = nodeMap.get(nodeOut);
      if (ipMap == null) {
         ipMap = new PolicyRouteFibIpMap();
         nodeMap.put(nodeOut, ipMap);
      }
      Ip ip = ips.get(row);
      // get-or-create the per-ip edge set
      EdgeSet edges = ipMap.get(ip);
      if (edges == null) {
         edges = new EdgeSet();
         ipMap.put(ip, edges);
      }
      edges.add(new Edge(nodeOut, outIfaces.get(row), inNodes.get(row),
            inIfaces.get(row)));
   }
   return nodeMap;
}
/**
 * Deserializes the predicate-info object from the logic directory.
 *
 * Note: the {@code logicFiles} parameter is not read by this
 * implementation; it is retained for interface compatibility.
 */
public PredicateInfo getPredicateInfo(Map<String, String> logicFiles) {
   // Get predicate semantics from rules file
   _logger.info("\n*** PARSING PREDICATE INFO ***\n");
   resetTimer();
   File predicateInfoFile = new File(getPredicateInfoPath());
   PredicateInfo predicateInfo = (PredicateInfo) deserializeObject(predicateInfoFile);
   printElapsedTime();
   return predicateInfo;
}
/** Resolves the predicate-info file path inside the extracted logic directory. */
private String getPredicateInfoPath() {
   return Paths.get(retrieveLogicDir().toString(), PREDICATE_INFO_FILENAME)
         .toString();
}
/** Parses the named predicate's nxtnet output text into a Relation. */
private Relation getRelation(EnvironmentSettings envSettings,
      String predicateName) {
   String nxtnetText = getNxtnetText(envSettings, predicateName);
   return new Relation.Builder(predicateName).build(_predicateInfo,
         nxtnetText);
}
/**
 * Builds per-node FIBs from the FIB relation. Rows belonging to a given
 * hostname are assumed to be contiguous; the start/end index of each
 * hostname's run is recorded first, then each run is converted into a
 * FibSet.
 */
private FibMap getRouteForwardingRules(EnvironmentSettings envSettings) {
   FibMap fibs = new FibMap();
   Relation relation = getRelation(envSettings, FIB_PREDICATE_NAME);
   EntityTable entityTable = initEntityTable(envSettings);
   List<String> nameList = relation.getColumns().get(0).asStringList();
   List<Prefix> networkList = relation.getColumns().get(1)
         .asPrefixList(entityTable);
   List<String> interfaceList = relation.getColumns().get(2).asStringList();
   List<String> nextHopList = relation.getColumns().get(3).asStringList();
   List<String> nextHopIntList = relation.getColumns().get(4).asStringList();
   String currentHostname = "";
   Map<String, Integer> startIndices = new HashMap<String, Integer>();
   Map<String, Integer> endIndices = new HashMap<String, Integer>();
   // locate the contiguous row range belonging to each hostname
   for (int i = 0; i < nameList.size(); i++) {
      String currentRowHostname = nameList.get(i);
      if (!currentHostname.equals(currentRowHostname)) {
         if (i > 0) {
            endIndices.put(currentHostname, i - 1);
         }
         currentHostname = currentRowHostname;
         startIndices.put(currentHostname, i);
      }
   }
   // close out the final hostname's range
   endIndices.put(currentHostname, nameList.size() - 1);
   for (String hostname : startIndices.keySet()) {
      FibSet fibRows = new FibSet();
      fibs.put(hostname, fibRows);
      int startIndex = startIndices.get(hostname);
      int endIndex = endIndices.get(hostname);
      for (int i = startIndex; i <= endIndex; i++) {
         Prefix prefix = networkList.get(i);
         String iface = interfaceList.get(i);
         String nextHop = nextHopList.get(i);
         String nextHopInt = nextHopIntList.get(i);
         fibRows.add(new FibRow(prefix, iface, nextHop, nextHopInt));
      }
   }
   return fibs;
}
/**
 * Retrieves the set of installed routes computed for the given environment
 * from nxtnet output.
 */
private RouteSet getRoutes(EnvironmentSettings envSettings) {
   checkDataPlaneFacts(envSettings);
   EntityTable entityTable = initEntityTable(envSettings);
   Relation relation = getRelation(envSettings,
         INSTALLED_ROUTE_PREDICATE_NAME);
   List<PrecomputedRoute> routeList = relation.getColumns().get(0)
         .asRouteList(entityTable);
   RouteSet routes = new RouteSet();
   routes.addAll(routeList);
   return routes;
}
/**
 * Walks the extracted logic directory and returns the contents of every
 * *.semantics file keyed by its path.
 *
 * Fix: a walk failure was previously swallowed via printStackTrace(),
 * silently returning a partial map; it now raises a BatfishException
 * (consistent with getNxtnetLogicFilenames). Cleanup of the extracted
 * logic directory happens in all cases via finally.
 */
private Map<String, String> getSemanticsFiles() {
   final Map<String, String> semanticsFiles = new HashMap<String, String>();
   File logicDirFile = retrieveLogicDir();
   FileVisitor<Path> visitor = new SimpleFileVisitor<Path>() {
      @Override
      public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
            throws IOException {
         String pathString = file.toString();
         if (pathString.endsWith(".semantics")) {
            String contents = Util.readFile(file.toFile());
            semanticsFiles.put(pathString, contents);
         }
         return super.visitFile(file, attrs);
      }
   };
   try {
      Files.walkFileTree(Paths.get(logicDirFile.getAbsolutePath()), visitor);
   }
   catch (IOException e) {
      throw new BatfishException("Failed to read semantics files", e);
   }
   finally {
      cleanupLogicDir();
   }
   return semanticsFiles;
}
/**
 * Returns a set containing each input edge together with its reverse, in
 * first-encounter order.
 *
 * NOTE(review): despite the name, this returns BOTH directions of every
 * pair rather than one representative per symmetric pair — the contains()
 * guard only skips edges already added. Confirm against callers whether
 * this is the intended contract.
 */
private Set<Edge> getSymmetricEdgePairs(EdgeSet edges) {
   LinkedHashSet<Edge> consumedEdges = new LinkedHashSet<Edge>();
   for (Edge edge : edges) {
      if (consumedEdges.contains(edge)) {
         continue;
      }
      Edge reverseEdge = new Edge(edge.getInterface2(), edge.getInterface1());
      consumedEdges.add(edge);
      consumedEdges.add(reverseEdge);
   }
   return consumedEdges;
}
/**
 * Returns whether the last run terminated with an exception.
 *
 * Note: non-standard PascalCase method name retained for compatibility
 * with existing callers.
 */
public boolean GetTerminatedWithException() {
   return _terminatedWithException;
}
/**
 * Parses the vendor configurations of a test rig and prints a histogram of
 * how often each unimplemented feature occurs.
 */
private void histogram(String testRigPath) {
   Map<File, String> configurationData = readConfigurationFiles(testRigPath,
         BfConsts.RELPATH_CONFIGURATIONS_DIR);
   Map<String, VendorConfiguration> vendorConfigurations = parseVendorConfigurations(configurationData);
   _logger.info("Building feature histogram...");
   MultiSet<String> histogram = new TreeMultiSet<String>();
   for (VendorConfiguration vendorConfiguration : vendorConfigurations
         .values()) {
      histogram.add(vendorConfiguration.getUnimplementedFeatures());
   }
   _logger.info("OK\n");
   for (String feature : histogram.elements()) {
      _logger.output(feature + ": " + histogram.count(feature) + "\n");
   }
}
/**
 * Returns the entity table for the given environment, creating and caching
 * it on first use from whichever nxtnet output directories exist.
 */
private EntityTable initEntityTable(EnvironmentSettings envSettings) {
   EntityTable entityTable = _entityTables.get(envSettings);
   if (entityTable != null) {
      return entityTable;
   }
   Map<String, String> nxtnetPredicateContents = new HashMap<String, String>();
   String dataPlaneOutputDir = envSettings.getNxtnetDataPlaneOutputDir();
   if (dataPlaneOutputDir != null && new File(dataPlaneOutputDir).exists()) {
      nxtnetPredicateContents.putAll(readFacts(dataPlaneOutputDir,
            NxtnetConstants.NXTNET_DATA_PLANE_ENTITY_SYMBOLS));
   }
   String trafficOutputDir = envSettings.getNxtnetTrafficOutputDir();
   if (trafficOutputDir != null && new File(trafficOutputDir).exists()) {
      nxtnetPredicateContents.putAll(readFacts(trafficOutputDir,
            NxtnetConstants.NXTNET_TRAFFIC_ENTITY_SYMBOLS));
   }
   entityTable = new EntityTable(nxtnetPredicateContents, _predicateInfo);
   _entityTables.put(envSettings, entityTable);
   return entityTable;
}
/**
 * Ensures the given environment exists on disk — creating its directories
 * and writing node/interface blacklists from the question when absent —
 * and, when {@code dp} is set, computes control-plane facts and the data
 * plane if they are not already present.
 */
private void initQuestionEnvironment(EnvironmentSettings envSettings,
      Question question, boolean dp, boolean differentialContext) {
   if (!environmentExists(envSettings)) {
      File envPath = new File(envSettings.getEnvPath());
      // create environment required folders
      envPath.mkdirs();
      // write node blacklist from question
      if (!question.getNodeBlacklist().isEmpty()) {
         StringBuilder nodeBlacklistSb = new StringBuilder();
         for (String node : question.getNodeBlacklist()) {
            nodeBlacklistSb.append(node + "\n");
         }
         String nodeBlacklist = nodeBlacklistSb.toString();
         Util.writeFile(envSettings.getNodeBlacklistPath(), nodeBlacklist);
      }
      // write interface blacklist from question
      if (!question.getInterfaceBlacklist().isEmpty()) {
         StringBuilder interfaceBlacklistSb = new StringBuilder();
         for (NodeInterfacePair pair : question.getInterfaceBlacklist()) {
            interfaceBlacklistSb.append(pair.getHostname() + ":"
                  + pair.getInterface() + "\n");
         }
         String interfaceBlacklist = interfaceBlacklistSb.toString();
         Util.writeFile(envSettings.getInterfaceBlacklistPath(),
               interfaceBlacklist);
      }
   }
   if (dp && !dataPlaneDependenciesExist(envSettings)) {
      _settings.setDumpControlPlaneFacts(true);
      boolean usePrecomputedFacts = _settings.getUsePrecomputedFacts();
      Map<String, StringBuilder> cpFactBins = new LinkedHashMap<String, StringBuilder>();
      initControlPlaneFactBins(cpFactBins, !usePrecomputedFacts);
      if (!usePrecomputedFacts) {
         computeControlPlaneFacts(cpFactBins, differentialContext,
               envSettings);
      }
      nxtnetDataPlane(envSettings);
      computeDataPlane(envSettings);
      // cached entity tables are stale once a new data plane is computed
      _entityTables.clear();
   }
}
/**
 * Initializes the base and/or delta environments needed to answer a
 * question. When a delta environment is required but none is named (or the
 * active-diff flag disagrees with settings), a fresh delta environment with
 * a random UUID name is created and becomes the active environment.
 *
 * @param question
 *           the question being answered
 * @param diff
 *           whether the question is differential (needs both environments)
 * @param diffActive
 *           whether the delta environment is the active one
 * @param dp
 *           whether data-plane dependencies are required
 */
private void initQuestionEnvironments(Question question, boolean diff,
      boolean diffActive, boolean dp) {
   if (diff || !diffActive) {
      initQuestionEnvironment(_baseEnvSettings, question, dp, false);
   }
   if (diff || diffActive) {
      if (_settings.getDiffEnvironmentName() == null
            || (diffActive && !_settings.getDiffActive())) {
         // no usable delta environment configured: synthesize one
         String diffEnvironmentName = UUID.randomUUID().toString();
         _settings.setDiffEnvironmentName(diffEnvironmentName);
         applyAutoBaseDir(_settings);
         // NOTE(review): this makes the delta environment the active one as
         // a side effect — confirm callers expect this
         _envSettings = _diffEnvSettings;
      }
      initQuestionEnvironment(_diffEnvSettings, question, dp, true);
   }
}
/**
 * Determines whether a file contains Java native serialization data by
 * comparing its leading bytes against the Java serialized-object header.
 *
 * @param inputFile
 *           the file whose header is inspected
 * @return {@code true} iff the file begins with the Java serialization
 *         header
 * @throws BatfishException
 *            if the file is too short or cannot be read
 */
private boolean isJavaSerializationData(File inputFile) {
   byte[] observedHeader = new byte[JAVA_SERIALIZED_OBJECT_HEADER.length];
   try (FileInputStream stream = new FileInputStream(inputFile)) {
      int numRead = stream.read(observedHeader, 0, observedHeader.length);
      if (numRead != observedHeader.length) {
         throw new BatfishException("Read wrong number of bytes");
      }
   }
   catch (IOException e) {
      throw new BatfishException("Could not read header from file: "
            + inputFile.toString(), e);
   }
   return Arrays.equals(observedHeader, JAVA_SERIALIZED_OBJECT_HEADER);
}
/**
 * Computes the set of logic blocks to remove so that only the named blocks
 * (and their transitive dependencies) are kept: starts from all known
 * blocks, then subtracts each requested block and its dependencies.
 *
 * NOTE(review): the final removal call is commented out, so this method
 * currently computes {@code qualifiedBlockNames} without acting on it —
 * confirm whether this is intentionally disabled.
 *
 * @param blockNames
 *           names of the blocks to keep
 * @throws BatfishException
 *            if any name does not correspond to a known block
 */
private void keepBlocks(List<String> blockNames) {
   Set<String> allBlockNames = new LinkedHashSet<String>();
   allBlockNames.addAll(Block.BLOCKS.keySet());
   for (String blockName : blockNames) {
      Block block = Block.BLOCKS.get(blockName);
      if (block == null) {
         throw new BatfishException("Invalid block name: \"" + blockName
               + "\"");
      }
      // keep the block's dependencies as well as the block itself
      Set<Block> dependencies = block.getDependencies();
      for (Block dependency : dependencies) {
         allBlockNames.remove(dependency.getName());
      }
      allBlockNames.remove(blockName);
   }
   // qualify the remaining (to-be-removed) names with the library prefix
   List<String> qualifiedBlockNames = new ArrayList<String>();
   for (String blockName : allBlockNames) {
      String qualifiedBlockName = LB_BATFISH_LIBRARY_NAME + ":" + blockName
            + "_rules";
      qualifiedBlockNames.add(qualifiedBlockName);
   }
   // lbFrontend.removeBlocks(qualifiedBlockNames);
}
/**
 * Loads vendor-independent configurations for the currently active
 * environment. Convenience overload of
 * {@code loadConfigurations(EnvironmentSettings)}.
 *
 * @return hostname-to-configuration map for the active environment
 */
public Map<String, Configuration> loadConfigurations() {
   return loadConfigurations(_envSettings);
}
/**
 * Deserializes vendor-independent configurations and applies
 * environment-specific transformations, in order: node blacklist (removes
 * nodes), interface blacklist (deactivates interfaces), delta
 * configurations (overrides), and VPN-interface pruning.
 *
 * @param envSettings
 *           the environment whose blacklists/deltas are applied
 * @return hostname-to-configuration map after all transformations
 */
public Map<String, Configuration> loadConfigurations(
      EnvironmentSettings envSettings) {
   Map<String, Configuration> configurations = deserializeConfigurations(_settings
         .getSerializeIndependentPath());
   processNodeBlacklist(configurations, envSettings);
   processInterfaceBlacklist(configurations, envSettings);
   processDeltaConfigurations(configurations, envSettings);
   disableUnusableVpnInterfaces(configurations, envSettings);
   return configurations;
}
/**
 * Deserializes the previously-serialized topology for the given
 * environment.
 *
 * @param envSettings
 *           environment whose serialized-topology path is read
 * @return the deserialized {@link Topology}
 */
private Topology loadTopology(EnvironmentSettings envSettings) {
   File serializedTopologyFile = new File(
         envSettings.getSerializedTopologyPath());
   _logger.info("Deserializing topology...");
   Topology result = (Topology) deserializeObject(serializedTopologyFile);
   _logger.info("OK\n");
   return result;
}
/**
 * Runs the nxtnet data-plane computation for an environment: reads the
 * control-plane facts needed for data-plane computation, writes the nxtnet
 * input file, executes nxtnet, and persists the resulting routes.
 *
 * @param envSettings
 *           environment providing fact/input/output paths
 */
private void nxtnetDataPlane(EnvironmentSettings envSettings) {
   Map<String, String> inputFacts = readFacts(
         envSettings.getControlPlaneFactsDir(),
         NxtnetConstants.NXTNET_DATA_PLANE_COMPUTATION_FACTS);
   writeNxtnetInput(getNxtnetDataPlaneOutputSymbols(), inputFacts,
         envSettings.getNxtnetDataPlaneInputFile(), envSettings);
   runNxtnet(envSettings.getNxtnetDataPlaneInputFile(),
         envSettings.getNxtnetDataPlaneOutputDir());
   writeRoutes(envSettings.getPrecomputedRoutesPath(), envSettings);
}
/**
 * Runs nxtnet traffic computation on the environment(s) relevant to the
 * current question: both base and delta environments for a differential
 * question, otherwise only the active environment.
 */
private void nxtnetTraffic() {
   if (!_settings.getDiffQuestion()) {
      nxtnetTraffic(_envSettings);
   }
   else {
      nxtnetTraffic(_baseEnvSettings);
      nxtnetTraffic(_diffEnvSettings);
   }
}
/**
 * Runs the nxtnet traffic computation for a single environment: writes the
 * precomputed routes as nxtnet facts, merges control-plane and flow facts
 * into one input map, writes the nxtnet traffic input file, and executes
 * nxtnet.
 *
 * @param envSettings
 *           environment providing fact/input/output paths
 */
private void nxtnetTraffic(EnvironmentSettings envSettings) {
   writeNxtnetPrecomputedRoutes(envSettings);
   Map<String, String> inputControlPlaneFacts = readFacts(
         envSettings.getControlPlaneFactsDir(),
         NxtnetConstants.NXTNET_TRAFFIC_COMPUTATION_CONTROL_PLANE_FACTS);
   Map<String, String> inputFlowFacts = readFacts(
         envSettings.getTrafficFactsDir(),
         NxtnetConstants.NXTNET_TRAFFIC_COMPUTATION_FLOW_FACTS);
   // merge both fact sources; key sets are disjoint by construction
   Map<String, String> inputFacts = new TreeMap<String, String>();
   inputFacts.putAll(inputControlPlaneFacts);
   inputFacts.putAll(inputFlowFacts);
   writeNxtnetInput(getNxtnetTrafficOutputSymbols(), inputFacts,
         envSettings.getNxtnetTrafficInputFile(), envSettings);
   runNxtnet(envSettings.getNxtnetTrafficInputFile(),
         envSettings.getNxtnetTrafficOutputDir());
}
/**
 * Serializes an answer to indented JSON and writes it to the logger's
 * output channel. If serialization fails, a failure answer describing the
 * error is serialized and emitted instead; if even that fails, the error
 * is logged.
 *
 * @param answer
 *           the answer to emit
 */
private void outputAnswer(Answer answer) {
   ObjectMapper mapper = new ObjectMapper();
   mapper.enable(SerializationFeature.INDENT_OUTPUT);
   try {
      String jsonString = mapper.writeValueAsString(answer);
      _logger.output(jsonString);
   }
   catch (Exception e) {
      BatfishException be = new BatfishException("Error in sending answer",
            e);
      Answer failureAnswer = Answer.failureAnswer(be.getMessage());
      try {
         String failureJsonString = mapper.writeValueAsString(failureAnswer);
         _logger.output(failureJsonString);
      }
      catch (Exception e1) {
         // BUGFIX: the original called errorf with no format specifier and
         // passed e1 as an unused argument, silently dropping the cause;
         // include the stack trace in the message instead
         _logger.errorf("Could not serialize failure answer: %s",
               ExceptionUtils.getStackTrace(e1));
      }
   }
}
/**
 * Parses with the given combined parser using this instance's logger and
 * settings.
 *
 * @param parser
 *           the combined parser to run
 * @return the resulting parse tree root
 */
private ParserRuleContext parse(BatfishCombinedParser<?, ?> parser) {
   return parse(parser, _logger, _settings);
}
/**
 * Logs the name of the file being parsed, then parses with the given
 * combined parser.
 *
 * @param parser
 *           the combined parser to run
 * @param filename
 *           name of the file being parsed (for logging only)
 * @return the resulting parse tree root
 */
private ParserRuleContext parse(BatfishCombinedParser<?, ?> parser,
      String filename) {
   _logger.info("Parsing: \"" + filename + "\"...");
   return parse(parser);
}
/**
 * Builds an {@link AwsVpcConfiguration} from raw AWS JSON configuration
 * files. Classic-link files are skipped (they interfere with VPC
 * processing), files with invalid JSON are logged and skipped, and
 * JSON errors raised while merging a valid document are fatal.
 *
 * @param configurationData
 *           map from configuration file to its raw text
 * @return the populated AWS VPC configuration
 * @throws BatfishException
 *            if a syntactically-valid JSON document cannot be merged
 */
private AwsVpcConfiguration parseAwsVpcConfigurations(
      Map<File, String> configurationData) {
   AwsVpcConfiguration config = new AwsVpcConfiguration();
   // iterate entries directly instead of keySet()+get() (one lookup each)
   for (Entry<File, String> configEntry : configurationData.entrySet()) {
      File file = configEntry.getKey();
      // we stop classic link processing here because it interferes with VPC
      // processing
      if (file.toString().contains("classic-link")) {
         _logger.errorf("%s has classic link configuration\n",
               file.toString());
         continue;
      }
      JSONObject jsonObj = null;
      try {
         jsonObj = new JSONObject(configEntry.getValue());
      }
      catch (JSONException e) {
         // invalid JSON is tolerated: log and move on
         _logger.errorf("%s does not have valid json\n", file.toString());
      }
      if (jsonObj != null) {
         try {
            config.addConfigElement(jsonObj, _logger);
         }
         catch (JSONException e) {
            throw new BatfishException("Problems parsing JSON in "
                  + file.toString(), e);
         }
      }
   }
   return config;
}
/**
 * Parses an interface-blacklist file into node/interface pairs. Each
 * non-blank line must have the form {@code hostname:interface}.
 *
 * @param interfaceBlacklistPath
 *           the blacklist file to read
 * @return the parsed pairs, sorted
 * @throws BatfishException
 *            if a non-blank line is not a two-field colon-separated pair
 */
private Set<NodeInterfacePair> parseInterfaceBlacklist(
      File interfaceBlacklistPath) {
   Set<NodeInterfacePair> blacklisted = new TreeSet<NodeInterfacePair>();
   String fileText = Util.readFile(interfaceBlacklistPath);
   for (String rawLine : fileText.split("\n")) {
      String entry = rawLine.trim();
      if (entry.isEmpty()) {
         continue;
      }
      String[] fields = entry.split(":");
      if (fields.length != 2) {
         throw new BatfishException(
               "Invalid node-interface pair format: " + entry);
      }
      blacklisted.add(new NodeInterfacePair(fields[0], fields[1]));
   }
   return blacklisted;
}
/**
 * Parses a node-blacklist file into a set of hostnames, one per non-blank
 * line.
 *
 * @param nodeBlacklistPath
 *           the blacklist file to read
 * @return the set of blacklisted hostnames
 */
private NodeSet parseNodeBlacklist(File nodeBlacklistPath) {
   NodeSet blacklistedNodes = new NodeSet();
   String fileText = Util.readFile(nodeBlacklistPath);
   for (String rawLine : fileText.split("\n")) {
      String hostname = rawLine.trim();
      if (!hostname.isEmpty()) {
         blacklistedNodes.add(hostname);
      }
   }
   return blacklistedNodes;
}
/**
 * Parses the {@code node_roles} file in a test rig into a node-to-roles
 * map using the role grammar.
 *
 * @param testRigPath
 *           root directory of the test rig containing {@code node_roles}
 * @return the parsed node-role map
 */
private NodeRoleMap parseNodeRoles(String testRigPath) {
   Path rolePath = Paths.get(testRigPath, "node_roles");
   String roleFileText = Util.readFile(rolePath.toFile());
   _logger.info("Parsing: \"" + rolePath.toAbsolutePath().toString() + "\"");
   BatfishCombinedParser<?, ?> parser = new RoleCombinedParser(roleFileText,
         _settings);
   RoleExtractor extractor = new RoleExtractor();
   ParserRuleContext tree = parse(parser);
   // walk the parse tree to populate the extractor's role map
   ParseTreeWalker walker = new ParseTreeWalker();
   walker.walk(extractor, tree);
   NodeRoleMap nodeRoles = extractor.getRoleMap();
   return nodeRoles;
}
/**
 * Reads and parses the question file. The JSON format is tried first; if
 * Jackson cannot parse it, the legacy custom question grammar is used as a
 * fallback.
 *
 * NOTE(review): parseQuestionParameters() returns either a JSONObject or a
 * QuestionParameters depending on which format the parameters file is in;
 * the casts below assume the parameters format matches the question format
 * — confirm mixed formats cannot occur.
 *
 * @return the parsed {@link Question}
 * @throws BatfishException
 *            if the legacy grammar also fails to parse or post-process
 */
private Question parseQuestion() {
   String questionPath = _settings.getQuestionPath();
   File questionFile = new File(questionPath);
   _logger.info("Reading question file: \"" + questionPath + "\"...");
   String questionText = Util.readFile(questionFile);
   _logger.info("OK\n");
   // first attempt: modern JSON question format
   try {
      ObjectMapper mapper = new ObjectMapper();
      Question question = mapper.readValue(questionText, Question.class);
      JSONObject parameters = (JSONObject) parseQuestionParameters();
      question.setJsonParameters(parameters);
      return question;
   }
   catch (IOException e1) {
      _logger
            .debugf(
                  "BF: could not parse as Json question: %s\nWill try old, custom parser.",
                  e1.getMessage());
   }
   // fallback: legacy custom question grammar
   QuestionParameters parameters = (QuestionParameters) parseQuestionParameters();
   QuestionCombinedParser parser = new QuestionCombinedParser(questionText,
         _settings);
   QuestionExtractor extractor = new QuestionExtractor(parser, getFlowTag(),
         parameters);
   try {
      ParserRuleContext tree = parse(parser, questionPath);
      _logger.info("\tPost-processing...");
      extractor.processParseTree(tree);
      _logger.info("OK\n");
   }
   catch (CleanBatfishException e) {
      // already user-presentable; propagate unchanged
      throw e;
   }
   catch (ParserBatfishException e) {
      String error = "Error parsing question: \"" + questionPath + "\"";
      throw new BatfishException(error, e);
   }
   catch (Exception e) {
      String error = "Error post-processing parse tree of question file: \""
            + questionPath + "\"";
      throw new BatfishException(error, e);
   }
   return extractor.getQuestion();
}
/**
 * Reads and parses the question-parameters file. JSON is tried first
 * (blank input yields an empty JSONObject); on JSON failure the legacy
 * parameters grammar is used.
 *
 * @return a {@code JSONObject} (JSON format) or a
 *         {@code QuestionParameters} (legacy format) — callers must handle
 *         both
 * @throws BatfishException
 *            if the file is missing, or the legacy grammar fails
 */
private Object parseQuestionParameters() {
   String questionParametersPath = _settings.getQuestionParametersPath();
   File questionParametersFile = new File(questionParametersPath);
   if (!questionParametersFile.exists()) {
      throw new BatfishException("Missing question parameters file: \""
            + questionParametersPath + "\"");
   }
   _logger.info("Reading question parameters file: \""
         + questionParametersPath + "\"...");
   String questionText = Util.readFile(questionParametersFile);
   _logger.info("OK\n");
   // first attempt: JSON (empty file counts as an empty parameter object)
   try {
      JSONObject jObj = (questionText.trim().isEmpty())? new JSONObject()
            : new JSONObject(questionText);
      return jObj;
   }
   catch (JSONException e1) {
      _logger
            .debugf(
                  "BF: could not parse as Json parameters: %s\nWill try old, custom parser.",
                  e1.getMessage());
   }
   // fallback: legacy parameters grammar
   QuestionParametersCombinedParser parser = new QuestionParametersCombinedParser(
         questionText, _settings);
   QuestionParametersExtractor extractor = new QuestionParametersExtractor();
   ParserRuleContext tree = null;
   try {
      tree = parse(parser, questionParametersPath);
   }
   catch (ParserBatfishException e) {
      String error = "Error parsing question parameters: \""
            + questionParametersPath + "\"";
      throw new BatfishException(error, e);
   }
   try {
      _logger.info("\tPost-processing...");
      extractor.processParseTree(tree);
      _logger.info("OK\n");
   }
   catch (Exception e) {
      String error = "Error post-processing parse tree of question parameters file: \""
            + questionParametersPath + "\"";
      throw new BatfishException(error, e);
   }
   return extractor.getParameters();
}
/**
 * Parses a topology file, detecting its format from the file's leading
 * text: GNS3 topologies start with "autostart", Batfish topologies start
 * with the Batfish topology header.
 *
 * @param topologyFilePath
 *           the topology file to parse
 * @return the parsed {@link Topology}
 * @throws BatfishException
 *            if the file is empty or in an unrecognized format
 */
private Topology parseTopology(File topologyFilePath) {
   _logger.info("*** PARSING TOPOLOGY ***\n");
   resetTimer();
   String topologyFileText = Util.readFile(topologyFilePath);
   BatfishCombinedParser<?, ?> parser = null;
   TopologyExtractor extractor = null;
   _logger.info("Parsing: \""
         + topologyFilePath.getAbsolutePath().toString() + "\" ...");
   // sniff the format from the first characters of the file
   if (topologyFileText.startsWith("autostart")) {
      parser = new GNS3TopologyCombinedParser(topologyFileText, _settings);
      extractor = new GNS3TopologyExtractor();
   }
   else if (topologyFileText
         .startsWith(BatfishTopologyCombinedParser.HEADER)) {
      parser = new BatfishTopologyCombinedParser(topologyFileText, _settings);
      extractor = new BatfishTopologyExtractor();
   }
   else if (topologyFileText.equals("")) {
      throw new BatfishException("ERROR: empty topology\n");
   }
   else {
      _logger.fatal("...ERROR\n");
      throw new BatfishException("Topology format error");
   }
   ParserRuleContext tree = parse(parser);
   ParseTreeWalker walker = new ParseTreeWalker();
   walker.walk(extractor, tree);
   Topology topology = extractor.getTopology();
   printElapsedTime();
   return topology;
}
/**
 * Parses raw vendor configuration files into vendor-specific
 * configuration objects, one parse job per file, executed through the
 * shared Batfish job executor.
 *
 * @param configurationData
 *           map from configuration file to its raw text
 * @return hostname-to-vendor-configuration map populated by the executor
 */
private Map<String, VendorConfiguration> parseVendorConfigurations(
      Map<File, String> configurationData) {
   _logger.info("\n*** PARSING VENDOR CONFIGURATION FILES ***\n");
   resetTimer();
   Map<String, VendorConfiguration> vendorConfigurations = new TreeMap<String, VendorConfiguration>();
   List<ParseVendorConfigurationJob> jobs = new ArrayList<ParseVendorConfigurationJob>();
   for (File currentFile : configurationData.keySet()) {
      // per-job warning sink, configured from the pedantic/red-flag/
      // unimplemented settings and current logger verbosity
      Warnings warnings = new Warnings(_settings.getPedanticAsError(),
            _settings.getPedanticRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_PEDANTIC),
            _settings.getRedFlagAsError(), _settings.getRedFlagRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_REDFLAG),
            _settings.getUnimplementedAsError(),
            _settings.getUnimplementedRecord()
                  && _logger.isActive(BatfishLogger.LEVEL_UNIMPLEMENTED),
            _settings.printParseTree());
      String fileText = configurationData.get(currentFile);
      ParseVendorConfigurationJob job = new ParseVendorConfigurationJob(
            _settings, fileText, currentFile, warnings);
      jobs.add(job);
   }
   // the executor runs the jobs and merges results into vendorConfigurations
   BatfishJobExecutor<ParseVendorConfigurationJob, ParseVendorConfigurationResult, Map<String, VendorConfiguration>> executor = new BatfishJobExecutor<ParseVendorConfigurationJob, ParseVendorConfigurationResult, Map<String, VendorConfiguration>>(
         _settings, _logger);
   executor.executeJobs(jobs, vendorConfigurations);
   printElapsedTime();
   return vendorConfigurations;
}
/**
 * Extracts nxtnet facts from vendor-independent configurations into the
 * given fact bins. Also collects all communities seen, synthesizes
 * external BGP remote-IP facts for /32 BGP neighbors whose addresses are
 * not local interface addresses, and runs a per-configuration fact
 * extractor whose warnings are always flushed to the logger.
 *
 * @param configurations
 *           the configurations to extract facts from
 * @param allCommunities
 *           out-parameter accumulating every community seen
 * @param factBins
 *           predicate-name-to-fact-lines bins to append to
 * @throws BatfishException
 *            if extraction failed for any configuration (after all are
 *            attempted, unless exit-on-first-error is set)
 */
private void populateConfigurationFactBins(
      Collection<Configuration> configurations, CommunitySet allCommunities,
      Map<String, StringBuilder> factBins) {
   _logger.info("\n*** EXTRACTING FACTS FROM CONFIGURATIONS ***\n");
   resetTimer();
   for (Configuration c : configurations) {
      allCommunities.addAll(c.getCommunities());
   }
   // collect all locally-assigned interface addresses, and all /32 BGP
   // neighbor addresses; the difference is the set of external BGP peers
   Set<Ip> interfaceIps = new HashSet<Ip>();
   Set<Ip> externalBgpRemoteIps = new TreeSet<Ip>();
   for (Configuration c : configurations) {
      for (Interface i : c.getInterfaces().values()) {
         for (Prefix p : i.getAllPrefixes()) {
            Ip ip = p.getAddress();
            interfaceIps.add(ip);
         }
      }
      BgpProcess proc = c.getBgpProcess();
      if (proc != null) {
         for (Prefix neighborPrefix : proc.getNeighbors().keySet()) {
            if (neighborPrefix.getPrefixLength() == Prefix.MAX_PREFIX_LENGTH) {
               Ip neighborAddress = neighborPrefix.getAddress();
               externalBgpRemoteIps.add(neighborAddress);
            }
         }
      }
   }
   externalBgpRemoteIps.removeAll(interfaceIps);
   // emit one external-peer fact and one host-route network fact per
   // external BGP remote IP
   StringBuilder wSetExternalBgpRemoteIp = factBins
         .get("SetExternalBgpRemoteIp");
   StringBuilder wSetNetwork = factBins.get("SetNetwork");
   for (Ip ip : externalBgpRemoteIps) {
      String node = ip.toString();
      long ipAsLong = ip.asLong();
      wSetExternalBgpRemoteIp.append(node + "|" + ipAsLong + "\n");
      wSetNetwork.append(ipAsLong + "|" + ipAsLong + "|" + ipAsLong + "|"
            + Prefix.MAX_PREFIX_LENGTH + "\n");
   }
   boolean pedanticAsError = _settings.getPedanticAsError();
   boolean pedanticRecord = _settings.getPedanticRecord();
   boolean redFlagAsError = _settings.getRedFlagAsError();
   boolean redFlagRecord = _settings.getRedFlagRecord();
   boolean unimplementedAsError = _settings.getUnimplementedAsError();
   boolean unimplementedRecord = _settings.getUnimplementedRecord();
   boolean processingError = false;
   for (Configuration c : configurations) {
      String hostname = c.getHostname();
      _logger.debug("Extracting facts from: \"" + hostname + "\"");
      Warnings warnings = new Warnings(pedanticAsError, pedanticRecord,
            redFlagAsError, redFlagRecord, unimplementedAsError,
            unimplementedRecord, false);
      try {
         ConfigurationFactExtractor cfe = new ConfigurationFactExtractor(c,
               allCommunities, factBins, warnings);
         cfe.writeFacts();
         _logger.debug("...OK\n");
      }
      catch (BatfishException e) {
         // record the failure but keep extracting from remaining configs
         // (unless configured to stop at the first error)
         _logger.fatal("...EXTRACTION ERROR\n");
         _logger.fatal(ExceptionUtils.getStackTrace(e));
         processingError = true;
         if (_settings.getExitOnFirstError()) {
            break;
         }
         else {
            continue;
         }
      }
      finally {
         // flush collected warnings whether or not extraction succeeded
         for (String warning : warnings.getRedFlagWarnings()) {
            _logger.redflag(warning);
         }
         for (String warning : warnings.getUnimplementedWarnings()) {
            _logger.unimplemented(warning);
         }
         for (String warning : warnings.getPedanticWarnings()) {
            _logger.pedantic(warning);
         }
      }
   }
   if (processingError) {
      throw new BatfishException(
            "Failed to extract facts from vendor-indpendent configuration structures");
   }
   printElapsedTime();
}
/**
 * Adds to the flow history every flow trace from the environment's flow
 * history relation whose flow tag matches the given tag. Column 0 of the
 * relation holds flows; column 1 holds the corresponding history lines.
 *
 * @param flowHistory
 *           the history being populated
 * @param envSettings
 *           environment whose relation output is read
 * @param environmentName
 *           name under which traces are recorded in the history
 * @param tag
 *           only flows with this tag are included
 */
private void populateFlowHistory(FlowHistory flowHistory,
      EnvironmentSettings envSettings, String environmentName, String tag) {
   EntityTable entityTable = initEntityTable(envSettings);
   Relation relation = getRelation(envSettings, FLOW_HISTORY_PREDICATE_NAME);
   List<Flow> flows = relation.getColumns().get(0).asFlowList(entityTable);
   List<String> historyLines = relation.getColumns().get(1).asStringList();
   int numEntries = flows.size();
   for (int i = 0; i < numEntries; i++) {
      Flow flow = flows.get(i);
      if (flow.getTag().equals(tag)) {
         String historyLine = historyLines.get(i);
         FlowTrace flowTrace = new FlowTrace(historyLine);
         flowHistory.addFlowTrace(flow, environmentName, flowTrace);
      }
   }
}
/**
 * Writes precomputed-BGP-advertisement facts for the given advertisements
 * into the control-plane fact bins: one main fact per advertisement
 * (indexed by a running counter), plus community, AS-path element, and
 * AS-path length facts keyed by the same index, and one network fact per
 * distinct advertised prefix.
 *
 * @param advertSet
 *           the advertisements to encode
 * @param cpFactBins
 *           the fact bins being appended to
 */
private void populatePrecomputedBgpAdvertisements(
      AdvertisementSet advertSet, Map<String, StringBuilder> cpFactBins) {
   StringBuilder adverts = cpFactBins
         .get(PRECOMPUTED_BGP_ADVERTISEMENTS_PREDICATE_NAME);
   StringBuilder advertCommunities = cpFactBins
         .get(PRECOMPUTED_BGP_ADVERTISEMENT_COMMUNITY_PREDICATE_NAME);
   StringBuilder advertPaths = cpFactBins
         .get(PRECOMPUTED_BGP_ADVERTISEMENT_AS_PATH_PREDICATE_NAME);
   StringBuilder advertPathLengths = cpFactBins
         .get(PRECOMPUTED_BGP_ADVERTISEMENT_AS_PATH_LENGTH_PREDICATE_NAME);
   StringBuilder wNetworks = cpFactBins.get(NETWORKS_PREDICATE_NAME);
   Set<Prefix> networks = new HashSet<Prefix>();
   // pcIndex links each advertisement's auxiliary facts to its main fact
   int pcIndex = 0;
   for (BgpAdvertisement advert : advertSet) {
      String type = advert.getType();
      Prefix network = advert.getNetwork();
      networks.add(network);
      // NOTE(review): here networkStart uses getAddress(), while the
      // networks loop below uses getNetworkAddress() — confirm whether the
      // two are intentionally different
      long networkStart = network.getAddress().asLong();
      long networkEnd = network.getEndAddress().asLong();
      int prefixLength = network.getPrefixLength();
      long nextHopIp = advert.getNextHopIp().asLong();
      String srcNode = advert.getSrcNode();
      long srcIp = advert.getSrcIp().asLong();
      String dstNode = advert.getDstNode();
      long dstIp = advert.getDstIp().asLong();
      String srcProtocol = advert.getSrcProtocol().protocolName();
      String originType = advert.getOriginType().toString();
      int localPref = advert.getLocalPreference();
      int med = advert.getMed();
      long originatorIp = advert.getOriginatorIp().asLong();
      adverts.append(pcIndex + "|" + type + "|" + networkStart + "|"
            + networkEnd + "|" + prefixLength + "|" + nextHopIp + "|"
            + srcNode + "|" + srcIp + "|" + dstNode + "|" + dstIp + "|"
            + srcProtocol + "|" + originType + "|" + localPref + "|" + med
            + "|" + originatorIp + "\n");
      for (Long community : advert.getCommunities()) {
         advertCommunities.append(pcIndex + "|" + community + "\n");
      }
      // one fact per (path position, AS) pair; AS sets may hold several ASes
      AsPath asPath = advert.getAsPath();
      int asPathLength = asPath.size();
      for (int i = 0; i < asPathLength; i++) {
         AsSet asSet = asPath.get(i);
         for (Integer as : asSet) {
            advertPaths.append(pcIndex + "|" + i + "|" + as + "\n");
         }
      }
      advertPathLengths.append(pcIndex + "|" + asPathLength + "\n");
      pcIndex++;
   }
   // one network fact per distinct advertised prefix
   for (Prefix network : networks) {
      long networkStart = network.getNetworkAddress().asLong();
      long networkEnd = network.getEndAddress().asLong();
      int prefixLength = network.getPrefixLength();
      wNetworks.append(networkStart + "|" + networkStart + "|" + networkEnd
            + "|" + prefixLength + "\n");
   }
}
/**
 * Deserializes an advertisement set from disk, keeps only the incoming
 * ("transformed incoming", {@code *_ti}) advertisements, and writes them
 * into the control-plane fact bins.
 *
 * @param precomputedBgpAdvertisementsPath
 *           path to the serialized {@code AdvertisementSet}
 * @param cpFactBins
 *           the fact bins being appended to
 */
private void populatePrecomputedBgpAdvertisements(
      String precomputedBgpAdvertisementsPath,
      Map<String, StringBuilder> cpFactBins) {
   File inputFile = new File(precomputedBgpAdvertisementsPath);
   AdvertisementSet allAdvertisements = (AdvertisementSet) deserializeObject(inputFile);
   AdvertisementSet incomingAdvertisements = new AdvertisementSet();
   for (BgpAdvertisement advert : allAdvertisements) {
      String advertType = advert.getType();
      if (advertType.equals("ibgp_ti") || advertType.equals("bgp_ti")) {
         incomingAdvertisements.add(advert);
      }
   }
   populatePrecomputedBgpAdvertisements(incomingAdvertisements, cpFactBins);
}
/**
 * Loads precomputed facts from a directory into the control-plane fact
 * bins (each file name must match a fact bin), then removes every bin that
 * remains empty so only populated predicates are emitted.
 *
 * @param precomputedFactsPath
 *           directory whose files are precomputed fact contents
 * @param cpFactBins
 *           the fact bins to fill and prune
 * @throws BatfishException
 *            if the directory cannot be listed or a file name does not
 *            correspond to a known fact
 */
private void populatePrecomputedFacts(String precomputedFactsPath,
      Map<String, StringBuilder> cpFactBins) {
   File precomputedFactsDir = new File(precomputedFactsPath);
   String[] filenames = precomputedFactsDir.list();
   // BUGFIX: File.list() returns null for a missing/unreadable directory;
   // the original dereferenced it unconditionally and threw a bare NPE
   if (filenames == null) {
      throw new BatfishException(
            "Could not list precomputed facts directory: \""
                  + precomputedFactsPath + "\"");
   }
   for (String filename : filenames) {
      File file = Paths.get(precomputedFactsPath, filename).toFile();
      StringBuilder sb = cpFactBins.get(filename);
      if (sb == null) {
         throw new BatfishException("File: \"" + filename
               + "\" does not correspond to a fact");
      }
      String contents = Util.readFile(file);
      sb.append(contents);
   }
   // prune bins that received no content (two passes to avoid mutating the
   // entry set while iterating it)
   Set<Map.Entry<String, StringBuilder>> cpEntries = cpFactBins.entrySet();
   Set<Map.Entry<String, StringBuilder>> cpEntriesToRemove = new HashSet<Map.Entry<String, StringBuilder>>();
   for (Entry<String, StringBuilder> e : cpEntries) {
      StringBuilder sb = e.getValue();
      if (sb.toString().length() == 0) {
         cpEntriesToRemove.add(e);
      }
   }
   for (Entry<String, StringBuilder> e : cpEntriesToRemove) {
      cpEntries.remove(e);
   }
}
/**
 * Deserializes an iBGP topology from disk and appends one
 * {@code node1|ip1|node2|ip2} fact per edge to the precomputed
 * iBGP-neighbors bin.
 *
 * @param precomputedIbgpNeighborsPath
 *           path to the serialized {@code IbgpTopology}
 * @param cpFactBins
 *           the fact bins being appended to
 */
private void populatePrecomputedIbgpNeighbors(
      String precomputedIbgpNeighborsPath,
      Map<String, StringBuilder> cpFactBins) {
   StringBuilder neighborFacts = cpFactBins
         .get(PRECOMPUTED_IBGP_NEIGHBORS_PREDICATE_NAME);
   File inputFile = new File(precomputedIbgpNeighborsPath);
   IbgpTopology topology = (IbgpTopology) deserializeObject(inputFile);
   for (IpEdge edge : topology) {
      neighborFacts.append(edge.getNode1() + "|" + edge.getIp1().asLong()
            + "|" + edge.getNode2() + "|" + edge.getIp2().asLong() + "\n");
   }
}
/**
 * Loads precomputed routes from each serialized route set and appends one
 * route fact per route, plus one network fact per distinct prefix seen
 * across all inputs.
 *
 * @param precomputedRoutesPaths
 *           paths to serialized {@code RouteSet} files
 * @param cpFactBins
 *           the fact bins being appended to
 */
private void populatePrecomputedRoutes(List<String> precomputedRoutesPaths,
      Map<String, StringBuilder> cpFactBins) {
   StringBuilder sb = cpFactBins.get(PRECOMPUTED_ROUTES_PREDICATE_NAME);
   StringBuilder wNetworks = cpFactBins.get(NETWORKS_PREDICATE_NAME);
   Set<Prefix> networks = new HashSet<Prefix>();
   for (String precomputedRoutesPath : precomputedRoutesPaths) {
      File inputFile = new File(precomputedRoutesPath);
      RouteSet routes = (RouteSet) deserializeObject(inputFile);
      for (PrecomputedRoute route : routes) {
         String node = route.getNode();
         Prefix prefix = route.getPrefix();
         networks.add(prefix);
         long networkStart = prefix.getNetworkAddress().asLong();
         long networkEnd = prefix.getEndAddress().asLong();
         int prefixLength = prefix.getPrefixLength();
         long nextHopIp = route.getNextHopIp().asLong();
         int admin = route.getAdministrativeCost();
         int cost = route.getCost();
         String protocol = route.getProtocol().protocolName();
         int tag = route.getTag();
         sb.append(node + "|" + networkStart + "|" + networkEnd + "|"
               + prefixLength + "|" + nextHopIp + "|" + admin + "|" + cost
               + "|" + protocol + "|" + tag + "\n");
      }
   }
   // BUGFIX: emit network facts once, after all inputs are processed. The
   // original ran this loop inside the per-path loop while `networks`
   // accumulated across paths, so with multiple input files earlier
   // prefixes were re-emitted for every later file (cf. the single
   // post-loop emission in populatePrecomputedBgpAdvertisements).
   for (Prefix network : networks) {
      long networkStart = network.getNetworkAddress().asLong();
      long networkEnd = network.getEndAddress().asLong();
      int prefixLength = network.getPrefixLength();
      wNetworks.append(networkStart + "|" + networkStart + "|" + networkEnd
            + "|" + prefixLength + "\n");
   }
}
/**
 * Prints the semantics of every help predicate derived from the given
 * predicate-semantics map.
 *
 * @param predicateSemantics
 *           predicate-name-to-semantics map from the rules file
 */
private void printAllPredicateSemantics(
      Map<String, String> predicateSemantics) {
   // Get predicate semantics from rules file
   _logger.info("\n*** PRINTING PREDICATE SEMANTICS ***\n");
   for (String predicate : getHelpPredicates(predicateSemantics)) {
      printPredicateSemantics(predicate);
      _logger.info("\n");
   }
}
/**
 * Logs the seconds elapsed since the last {@code resetTimer()} call.
 */
private void printElapsedTime() {
   double elapsedSeconds = getElapsedTime(_timerCount);
   String message = "Time taken for this task: " + elapsedSeconds
         + " seconds\n";
   _logger.info(message);
}
/**
 * Renders every row of a predicate's relation in LogicBlox-style syntax
 * and writes the result to the logger's output channel. Functions are
 * rendered as {@code name[args] = value.} and ordinary predicates as
 * {@code name(args).}.
 *
 * @param envSettings
 *           environment whose relation output is read
 * @param predicateName
 *           the predicate to print
 */
private void printPredicate(EnvironmentSettings envSettings,
      String predicateName) {
   boolean function = _predicateInfo.isFunction(predicateName);
   StringBuilder sb = new StringBuilder();
   EntityTable entityTable = initEntityTable(envSettings);
   Relation relation = getRelation(envSettings, predicateName);
   List<Column> columns = relation.getColumns();
   List<LBValueType> valueTypes = _predicateInfo
         .getPredicateValueTypes(predicateName);
   int numColumns = columns.size();
   int numRows = relation.getNumRows();
   for (int i = 0; i < numRows; i++) {
      sb.append(predicateName);
      // functions use [..] = value syntax; predicates use (..) syntax
      if (function) {
         sb.append("[");
      }
      else {
         sb.append("(");
      }
      for (int j = 0; j < numColumns; j++) {
         // for a function, the last column is the value on the right of
         // "=", so the separator placement differs around the penultimate
         // (last bracketed) column
         boolean last = (j == numColumns - 1);
         boolean penultimate = (j == numColumns - 2);
         String part = columns.get(j)
               .getItem(i, entityTable, valueTypes.get(j)).toString();
         sb.append(part);
         if ((function && !last && !penultimate) || (!function && !last)) {
            sb.append(", ");
         }
         else if (function && penultimate) {
            sb.append("] = ");
         }
         else if (last) {
            if (!function) {
               sb.append(")");
            }
            sb.append(".\n");
         }
      }
   }
   _logger.output(sb.toString());
}
/**
 * Prints each named predicate's relation for the given environment, timing
 * the whole operation.
 *
 * @param envSettings
 *           environment whose relation output is read
 * @param predicateNames
 *           the predicates to print
 */
public void printPredicates(EnvironmentSettings envSettings,
      Set<String> predicateNames) {
   // Print predicate(s) here
   _logger.info("\n*** SUBMITTING QUERY(IES) ***\n");
   resetTimer();
   for (String name : predicateNames) {
      printPredicate(envSettings, name);
   }
   printElapsedTime();
}
/**
 * Logs a predicate's name and its recorded semantics, substituting
 * "&lt;missing&gt;" when no semantics are recorded.
 *
 * @param predicateName
 *           the predicate whose semantics are printed
 */
private void printPredicateSemantics(String predicateName) {
   String recorded = _predicateInfo.getPredicateSemantics(predicateName);
   String semantics = (recorded == null) ? "<missing>" : recorded;
   _logger.info("\n");
   _logger.info("Predicate: " + predicateName + "\n");
   _logger.info("Semantics: " + semantics + "\n");
}
/**
 * Synthesizes the topology from the loaded configurations and prints each
 * symmetric edge pair (an edge and its reverse) on one line.
 *
 * NOTE(review): printElapsedTime() is called here without a preceding
 * resetTimer(), so the reported time includes whatever preceded this
 * method — confirm whether that is intended.
 */
private void printSymmetricEdgePairs() {
   Map<String, Configuration> configs = loadConfigurations();
   EdgeSet edges = synthesizeTopology(configs);
   Set<Edge> symmetricEdgePairs = getSymmetricEdgePairs(edges);
   List<Edge> edgeList = new ArrayList<Edge>();
   edgeList.addAll(symmetricEdgePairs);
   // consecutive list elements form a pair: (2i) and (2i + 1)
   for (int i = 0; i < edgeList.size() / 2; i++) {
      Edge edge1 = edgeList.get(2 * i);
      Edge edge2 = edgeList.get(2 * i + 1);
      _logger.output(edge1.getNode1() + ":" + edge1.getInt1() + ","
            + edge1.getNode2() + ":" + edge1.getInt2() + " "
            + edge2.getNode1() + ":" + edge2.getInt1() + ","
            + edge2.getNode2() + ":" + edge2.getInt2() + "\n");
   }
   printElapsedTime();
}
/**
 * Overlays the environment's delta configurations onto the given
 * configurations, replacing any same-named entries.
 *
 * @param configurations
 *           the configurations being modified in place
 * @param envSettings
 *           environment providing the delta configurations
 */
private void processDeltaConfigurations(
      Map<String, Configuration> configurations,
      EnvironmentSettings envSettings) {
   Map<String, Configuration> deltaConfigurations = getDeltaConfigurations(envSettings);
   configurations.putAll(deltaConfigurations);
   // TODO: deal with topological changes
}
/**
 * Reads the external bgp announcement specified in the environment, and
 * populates the vendor-independent configurations with data about those
 * announcements
 *
 * @param configurations
 *           The vendor-independent configurations to be modified
 * @param envSettings
 *           The settings for the environment, containing e.g. the path to
 *           the external announcements file
 * @param cpFactBins
 *           The container for nxtnet facts
 * @param allCommunities
 *           accumulates the communities carried by the announcements
 */
private void processExternalBgpAnnouncements(
      Map<String, Configuration> configurations,
      EnvironmentSettings envSettings,
      Map<String, StringBuilder> cpFactBins, CommunitySet allCommunities) {
   AdvertisementSet advertSet = new AdvertisementSet();
   String externalBgpAnnouncementsPath = envSettings
         .getExternalBgpAnnouncementsPath();
   File externalBgpAnnouncementsFile = new File(externalBgpAnnouncementsPath);
   // the announcements file is optional; absence means no external
   // announcements for this environment
   if (externalBgpAnnouncementsFile.exists()) {
      String externalBgpAnnouncementsFileContents = Util
            .readFile(externalBgpAnnouncementsFile);
      // Populate advertSet with BgpAdvertisements that
      // gets passed to populatePrecomputedBgpAdvertisements.
      // See populatePrecomputedBgpAdvertisements for the things that get
      // extracted from these advertisements.
      try {
         JSONObject jsonObj = new JSONObject(
               externalBgpAnnouncementsFileContents);
         JSONArray announcements = jsonObj
               .getJSONArray(BfConsts.KEY_BGP_ANNOUNCEMENTS);
         for (int index = 0; index < announcements.length(); index++) {
            JSONObject announcement = announcements.getJSONObject(index);
            BgpAdvertisement bgpAdvertisement = new BgpAdvertisement(
                  announcement);
            allCommunities.addAll(bgpAdvertisement.getCommunities());
            advertSet.add(bgpAdvertisement);
         }
      }
      catch (JSONException e) {
         throw new BatfishException("Problems parsing JSON in "
               + externalBgpAnnouncementsFile.toString(), e);
      }
      populatePrecomputedBgpAdvertisements(advertSet, cpFactBins);
   }
}
/**
 * Deactivates every blacklisted interface in the given configurations.
 * Pairs whose node is absent from the configurations are skipped (the node
 * may itself have been removed by the node blacklist, which runs first in
 * loadConfigurations).
 *
 * @param configurations
 *           the configurations being modified in place
 * @param envSettings
 *           environment providing the interface blacklist (may be absent)
 * @throws BatfishException
 *            if a blacklisted interface does not exist on its node
 */
private void processInterfaceBlacklist(
      Map<String, Configuration> configurations,
      EnvironmentSettings envSettings) {
   Set<NodeInterfacePair> blacklistInterfaces = getInterfaceBlacklist(envSettings);
   if (blacklistInterfaces != null) {
      for (NodeInterfacePair p : blacklistInterfaces) {
         String hostname = p.getHostname();
         String iface = p.getInterface();
         Configuration node = configurations.get(hostname);
         if (node == null) {
            // BUGFIX: the original dereferenced null here when the node was
            // blacklisted (removed) or unknown; a removed node's interfaces
            // need no deactivation
            continue;
         }
         Interface i = node.getInterfaces().get(iface);
         if (i == null) {
            // BUGFIX: fail with a clear message instead of an NPE
            throw new BatfishException("Blacklisted interface \"" + iface
                  + "\" does not exist on node \"" + hostname + "\"");
         }
         i.setActive(false);
      }
   }
}
/**
 * Removes every blacklisted node from the given configurations. A missing
 * blacklist leaves the configurations untouched.
 *
 * @param configurations
 *           the configurations being modified in place
 * @param envSettings
 *           environment providing the node blacklist (may be absent)
 */
private void processNodeBlacklist(Map<String, Configuration> configurations,
      EnvironmentSettings envSettings) {
   NodeSet blacklistNodes = getNodeBlacklist(envSettings);
   if (blacklistNodes == null) {
      return;
   }
   for (String hostname : blacklistNodes) {
      configurations.remove(hostname);
   }
}
/**
 * Parses the given topology file.
 *
 * @param topologyFilePath
 *           the topology file to parse
 * @param factBins
 *           currently unused; kept for interface compatibility
 * @return the parsed {@link Topology}
 */
private Topology processTopologyFile(File topologyFilePath,
      Map<String, StringBuilder> factBins) {
   Topology topology = parseTopology(topologyFilePath);
   return topology;
}
/**
 * Executes the query action: selects the predicates to print (all known
 * predicates when query-all is set, otherwise those named in settings),
 * validates them, and prints their relations for the active environment.
 */
private void query() {
   Map<String, String> allPredicateNames = _predicateInfo
         .getPredicateNames();
   Set<String> predicateNames = new TreeSet<String>();
   if (_settings.getQueryAll()) {
      predicateNames.addAll(allPredicateNames.keySet());
   }
   else {
      predicateNames.addAll(_settings.getPredicates());
   }
   checkQuery(_envSettings, predicateNames);
   printPredicates(_envSettings, predicateNames);
}
/**
 * Reads every non-hidden file in a test rig's configuration subdirectory
 * into memory, sorted by file, appending a trailing newline to non-empty
 * files.
 *
 * @param testRigPath
 *           root directory of the test rig
 * @param configsType
 *           subdirectory name (and log label) of the configs to read
 * @return file-to-contents map, in sorted file order
 * @throws BatfishException
 *            if the configs directory cannot be listed
 */
private Map<File, String> readConfigurationFiles(String testRigPath,
      String configsType) {
   _logger.infof("\n*** READING %s FILES ***\n", configsType);
   resetTimer();
   Map<File, String> configurationData = new TreeMap<File, String>();
   File configsPath = Paths.get(testRigPath, configsType).toFile();
   // skip dotfiles (editor backups, VCS metadata, etc.)
   File[] configFilePaths = configsPath.listFiles(new FilenameFilter() {
      @Override
      public boolean accept(File dir, String name) {
         return !name.startsWith(".");
      }
   });
   if (configFilePaths == null) {
      throw new BatfishException(
            "Error reading test rig configs directory: \""
                  + configsPath.toString() + "\"");
   }
   Arrays.sort(configFilePaths);
   for (File file : configFilePaths) {
      _logger.debug("Reading: \"" + file.toString() + "\"\n");
      String fileTextRaw = Util.readFile(file.getAbsoluteFile());
      // ensure non-empty files end with a newline for downstream parsers
      String fileText = fileTextRaw
            + ((fileTextRaw.length() != 0) ? "\n" : "");
      configurationData.put(file, fileText);
   }
   printElapsedTime();
   return configurationData;
}
/**
 * Reads the named fact files from a directory into a sorted
 * name-to-contents map.
 *
 * @param factsDir
 *           directory containing one file per fact
 * @param factNames
 *           names of the facts (and files) to read
 * @return fact-name-to-contents map
 */
private Map<String, String> readFacts(String factsDir, Set<String> factNames) {
   Map<String, String> facts = new TreeMap<String, String>();
   for (String factName : factNames) {
      File factFile = Paths.get(factsDir, factName).toFile();
      facts.put(factName, Util.readFile(factFile));
   }
   return facts;
}
/**
 * Computes the set of logic blocks to remove: each named block plus every
 * block that (transitively) depends on it. Mirror of {@code keepBlocks}.
 *
 * NOTE(review): as in keepBlocks, the final removal call is commented out,
 * so {@code qualifiedBlockNames} is computed but unused — confirm whether
 * this is intentionally disabled.
 *
 * @param blockNames
 *           names of the blocks to remove
 * @throws BatfishException
 *            if any name does not correspond to a known block
 */
private void removeBlocks(List<String> blockNames) {
   Set<String> allBlockNames = new LinkedHashSet<String>();
   for (String blockName : blockNames) {
      Block block = Block.BLOCKS.get(blockName);
      if (block == null) {
         throw new BatfishException("Invalid block name: \"" + blockName
               + "\"");
      }
      // removing a block requires removing everything that depends on it
      Set<Block> dependents = block.getDependents();
      for (Block dependent : dependents) {
         allBlockNames.add(dependent.getName());
      }
      allBlockNames.add(blockName);
   }
   // qualify the to-be-removed names with the library prefix
   List<String> qualifiedBlockNames = new ArrayList<String>();
   for (String blockName : allBlockNames) {
      String qualifiedBlockName = LB_BATFISH_LIBRARY_NAME + ":" + blockName
            + "_rules";
      qualifiedBlockNames.add(qualifiedBlockName);
   }
   // lbFrontend.removeBlocks(qualifiedBlockNames);
}
/**
 * Starts a new timing interval; paired with {@code printElapsedTime()}.
 */
private void resetTimer() {
   _timerCount = System.currentTimeMillis();
}
/**
 * Locates the directory containing the logic resources (.semantics, .pl,
 * and predicate-info files). When running from a onejar archive, the
 * relevant entries are extracted to a temporary directory (remembered in
 * {@code _tmpLogicDir} for later cleanup) and the extracted location of
 * {@link LogicResourceLocator} is used to find the logic root; otherwise
 * the directory is resolved directly from the classpath.
 *
 * @return the directory containing the logic resources
 * @throws BatfishException
 *            if extraction from the archive fails or the classpath
 *            location cannot be resolved
 */
private File retrieveLogicDir() {
   File logicDirFile = null;
   final String locatorFilename = LogicResourceLocator.class.getSimpleName()
         + ".class";
   URL logicSourceURL = LogicResourceLocator.class.getProtectionDomain()
         .getCodeSource().getLocation();
   String logicSourceString = logicSourceURL.toString();
   UrlZipExplorer zip = null;
   // only logic-related files are extracted from the archive
   StringFilter lbFilter = new StringFilter() {
      @Override
      public boolean accept(String filename) {
         return filename.endsWith(".semantics") || filename.endsWith(".pl")
               || filename.endsWith(locatorFilename)
               || filename.endsWith(PREDICATE_INFO_FILENAME);
      }
   };
   if (logicSourceString.startsWith("onejar:")) {
      // running from a onejar archive: extract to a temp dir, then walk it
      // to find where the locator class landed — its parent is the logic
      // root
      FileVisitor<Path> visitor = null;
      try {
         zip = new UrlZipExplorer(logicSourceURL);
         Path destinationDir = Files
               .createTempDirectory("batfish_tmp_logic");
         File destinationDirAsFile = destinationDir.toFile();
         zip.extractFiles(lbFilter, destinationDirAsFile);
         visitor = new SimpleFileVisitor<Path>() {
            private String _projectDirectory;

            @Override
            public String toString() {
               // abused as the visitor's result channel (read below)
               return _projectDirectory;
            }

            @Override
            public FileVisitResult visitFile(Path aFile,
                  BasicFileAttributes aAttrs) throws IOException {
               if (aFile.endsWith(locatorFilename)) {
                  _projectDirectory = aFile.getParent().toString();
                  return FileVisitResult.TERMINATE;
               }
               return FileVisitResult.CONTINUE;
            }
         };
         Files.walkFileTree(destinationDir, visitor);
         // remember the temp dir so it can be cleaned up later
         _tmpLogicDir = destinationDirAsFile;
      }
      catch (IOException e) {
         throw new BatfishException(
               "Failed to retrieve logic dir from onejar archive", e);
      }
      String fileString = visitor.toString();
      return new File(fileString);
   }
   else {
      // running from an exploded classpath: resolve the package directory
      String logicPackageResourceName = LogicResourceLocator.class
            .getPackage().getName().replace('.', SEPARATOR.charAt(0));
      try {
         logicDirFile = new File(LogicResourceLocator.class.getClassLoader()
               .getResource(logicPackageResourceName).toURI());
      }
      catch (URISyntaxException e) {
         throw new BatfishException("Failed to resolve logic directory", e);
      }
      return logicDirFile;
   }
}
/**
 * Main driver: inspects the settings object and executes every requested
 * task in a fixed order. Tasks fall into two groups: "terminal" tasks that
 * return immediately after completing, and accumulating tasks that set
 * {@code action} so that the final no-op check does not fire.
 *
 * NOTE(review): the order of the blocks below is load-bearing — e.g.
 * serialization must precede fact dumping, and fact dumping must precede
 * the data-plane computation. Do not reorder.
 *
 * @throws CleanBatfishException if no task was requested at all
 */
public void run() {
   boolean action = false;
   // Predicate info is needed by every task that talks to the logic engine.
   if (_settings.getQuery() || _settings.getPrintSemantics()
         || _settings.getDataPlane() || _settings.getWriteRoutes()
         || _settings.getWriteBgpAdvertisements()
         || _settings.getWriteIbgpNeighbors() || _settings.getHistory()
         || _settings.getNxtnetDataPlane() || _settings.getNxtnetTraffic()
         || _settings.getAnswer()) {
      Map<String, String> logicFiles = getSemanticsFiles();
      _predicateInfo = getPredicateInfo(logicFiles);
      // Print predicate semantics and quit if requested
      if (_settings.getPrintSemantics()) {
         printAllPredicateSemantics(_predicateInfo.getPredicateSemantics());
         return;
      }
   }
   // --- terminal tasks: each returns without running anything else ---
   if (_settings.getPrintSymmetricEdgePairs()) {
      printSymmetricEdgePairs();
      return;
   }
   if (_settings.getSynthesizeTopology()) {
      writeSynthesizedTopology();
      return;
   }
   if (_settings.getSynthesizeJsonTopology()) {
      writeJsonTopology();
      return;
   }
   if (_settings.getBuildPredicateInfo()) {
      buildPredicateInfo();
      return;
   }
   if (_settings.getHistogram()) {
      histogram(_settings.getTestRigPath());
      return;
   }
   if (_settings.getGenerateOspfTopologyPath() != null) {
      generateOspfConfigs(_settings.getGenerateOspfTopologyPath(),
            _settings.getSerializeIndependentPath());
      return;
   }
   if (_settings.getFlatten()) {
      String flattenSource = _settings.getTestRigPath();
      String flattenDestination = _settings.getFlattenDestination();
      flatten(flattenSource, flattenDestination);
      return;
   }
   if (_settings.getGenerateStubs()) {
      String inputRole = _settings.getGenerateStubsInputRole();
      String interfaceDescriptionRegex = _settings
            .getGenerateStubsInterfaceDescriptionRegex();
      int stubAs = _settings.getGenerateStubsRemoteAs();
      generateStubs(inputRole, stubAs, interfaceDescriptionRegex);
      return;
   }
   // if (_settings.getZ3()) {
   // Map<String, Configuration> configurations = loadConfigurations();
   // String dataPlanePath = _envSettings.getDataPlanePath();
   // if (dataPlanePath == null) {
   // throw new BatfishException("Missing path to data plane");
   // File dataPlanePathAsFile = new File(dataPlanePath);
   // genZ3(configurations, dataPlanePathAsFile);
   // return;
   if (_settings.getAnonymize()) {
      anonymizeConfigurations();
      return;
   }
   // if (_settings.getRoleTransitQuery()) {
   // genRoleTransitQueries();
   // return;
   // --- accumulating tasks: several may run in one invocation ---
   if (_settings.getSerializeVendor()) {
      String testRigPath = _settings.getTestRigPath();
      String outputPath = _settings.getSerializeVendorPath();
      serializeVendorConfigs(testRigPath, outputPath);
      action = true;
   }
   if (_settings.getSerializeIndependent()) {
      String inputPath = _settings.getSerializeVendorPath();
      String outputPath = _settings.getSerializeIndependentPath();
      serializeIndependentConfigs(inputPath, outputPath);
      action = true;
   }
   Map<String, StringBuilder> cpFactBins = null;
   if (_settings.getDumpControlPlaneFacts()) {
      boolean usePrecomputedFacts = _settings.getUsePrecomputedFacts();
      cpFactBins = new LinkedHashMap<String, StringBuilder>();
      // Headers are only written when facts are computed fresh.
      initControlPlaneFactBins(cpFactBins, !usePrecomputedFacts);
      if (!usePrecomputedFacts) {
         computeControlPlaneFacts(cpFactBins, _settings.getDiffActive(),
               _envSettings);
      }
      action = true;
   }
   if (_settings.getNxtnetDataPlane()) {
      nxtnetDataPlane(_envSettings);
      action = true;
   }
   if (_settings.getUsePrecomputedFacts()) {
      // cpFactBins may be null here if dumping was not requested;
      // populatePrecomputedFacts is expected to cope (TODO confirm).
      populatePrecomputedFacts(_settings.getPrecomputedFactsPath(),
            cpFactBins);
   }
   // Remove blocks if requested
   if (_settings.getRemoveBlocks() || _settings.getKeepBlocks()) {
      List<String> blockNames = _settings.getBlockNames();
      if (_settings.getRemoveBlocks()) {
         removeBlocks(blockNames);
      }
      if (_settings.getKeepBlocks()) {
         keepBlocks(blockNames);
      }
      action = true;
   }
   if (_settings.getAnswer()) {
      answer();
      action = true;
   }
   if (_settings.getQuery()) {
      query();
      return;
   }
   if (_settings.getDataPlane()) {
      computeDataPlane();
      action = true;
   }
   if (_settings.getNxtnetTraffic()) {
      nxtnetTraffic();
      action = true;
   }
   if (_settings.getHistory()) {
      getHistory();
      action = true;
   }
   if (_settings.getWriteRoutes()) {
      writeRoutes(_settings.getPrecomputedRoutesPath(), _envSettings);
      action = true;
   }
   if (_settings.getWriteBgpAdvertisements()) {
      writeBgpAdvertisements(
            _settings.getPrecomputedBgpAdvertisementsPath(), _envSettings);
      action = true;
   }
   if (_settings.getWriteIbgpNeighbors()) {
      writeIbgpNeighbors(_settings.getPrecomputedIbgpNeighborsPath());
      action = true;
   }
   if (!action) {
      throw new CleanBatfishException(
            "No task performed! Run with -help flag to see usage\n");
   }
}
/**
 * Runs the external nxtnet solver over the given input file, writing its
 * results into the given output directory.
 *
 * The extracted logic directory is always cleaned up, and the captured
 * stdout/stderr of the child process is always examined, even when the
 * process fails (hence the work in the finally block).
 *
 * @param nxtnetInputFile path to the generated nxtnet input file
 * @param nxtnetOutputDir directory into which nxtnet writes its relations
 * @throws BatfishException if nxtnet cannot be started, its output cannot
 *                          be decoded, or it exits abnormally
 */
private void runNxtnet(String nxtnetInputFile, String nxtnetOutputDir) {
   _logger.info("\n*** RUNNING NXTNET ***\n");
   resetTimer();
   File logicDir = retrieveLogicDir();
   String[] logicFilenames = getNxtnetLogicFilenames(logicDir);
   DefaultExecutor executor = new DefaultExecutor();
   ByteArrayOutputStream outStream = new ByteArrayOutputStream();
   ByteArrayOutputStream errStream = new ByteArrayOutputStream();
   executor.setStreamHandler(new PumpStreamHandler(outStream, errStream));
   executor.setExitValue(0);
   CommandLine cmdLine = new CommandLine(NXTNET_COMMAND);
   cmdLine.addArgument("-dir");
   cmdLine.addArgument(nxtnetOutputDir);
   cmdLine.addArgument("-rev-lookup");
   cmdLine.addArgument("-mcc");
   cmdLine.addArgument(nxtnetInputFile);
   cmdLine.addArguments(logicFilenames);
   StringBuilder cmdLineSb = new StringBuilder();
   cmdLineSb.append(NXTNET_COMMAND + " ");
   cmdLineSb.append(org.batfish.common.Util.joinStrings(" ",
         cmdLine.getArguments()));
   String cmdLineString = cmdLineSb.toString();
   // A non-zero exit code surfaces as ExecuteException; remember it and
   // report with the captured stderr below, after cleanup.
   boolean failure = false;
   _logger.info("Command line: " + cmdLineString + " \n");
   try {
      executor.execute(cmdLine);
   }
   catch (ExecuteException e) {
      failure = true;
   }
   catch (IOException e) {
      throw new BatfishException("Unknown error running nxtnet", e);
   }
   finally {
      cleanupLogicDir();
      byte[] outRaw = outStream.toByteArray();
      byte[] errRaw = errStream.toByteArray();
      String out = null;
      String err = null;
      try {
         out = new String(outRaw, "UTF-8");
         err = new String(errRaw, "UTF-8");
      }
      catch (IOException e) {
         // Fixed typo: was "nxnet"
         throw new BatfishException("Error reading nxtnet output", e);
      }
      StringBuilder sb = new StringBuilder();
      if (failure) {
         sb.append("nxtnet terminated abnormally:\n");
         sb.append("nxtnet command line: " + cmdLine.toString() + "\n");
         sb.append(err);
         throw new BatfishException(sb.toString());
      }
      else {
         sb.append("nxtnet output:\n");
         sb.append(out);
         _logger.debug(sb.toString());
         _logger.info("nxtnet completed successfully\n");
      }
   }
   printElapsedTime();
}
/**
 * Parses the AWS VPC configuration files in the test rig and, unless
 * output is suppressed, serializes the resulting structure to disk.
 *
 * Fixes relative to the original: the log message now opens the quoted
 * path it closes ("\"" was only appended after the path), and the timer is
 * reset/printed inside the output branch, matching the sibling
 * serialize* methods (previously printElapsedTime() could run without a
 * preceding resetTimer() under -noOutput).
 *
 * @param testRigPath path to the test rig containing AWS VPC configs
 * @param outputPath  directory to write the serialized structure into
 */
private void serializeAwsVpcConfigs(String testRigPath, String outputPath) {
   Map<File, String> configurationData = readConfigurationFiles(testRigPath,
         BfConsts.RELPATH_AWS_VPC_CONFIGS_DIR);
   AwsVpcConfiguration config = parseAwsVpcConfigurations(configurationData);
   if (!_settings.getNoOutput()) {
      _logger.info("\n*** SERIALIZING AWS CONFIGURATION STRUCTURES ***\n");
      resetTimer();
      new File(outputPath).mkdirs();
      Path currentOutputPath = Paths.get(outputPath,
            BfConsts.RELPATH_AWS_VPC_CONFIGS_FILE);
      _logger.debug("Serializing AWS VPCs to \""
            + currentOutputPath.toString() + "\"...");
      serializeObject(config, currentOutputPath.toFile());
      _logger.debug("OK\n");
      printElapsedTime();
   }
}
/**
 * Serializes vendor-independent configurations, one file per node, into
 * the given output directory.
 *
 * Improvement: iterates entrySet() instead of keySet()+get(), avoiding a
 * second hash lookup per node.
 *
 * @param configurations node name to configuration map; must not be null
 * @param outputPath     directory to write serialized configurations into
 * @throws BatfishException if configurations is null (conversion failed)
 */
private void serializeIndependentConfigs(
      Map<String, Configuration> configurations, String outputPath) {
   if (configurations == null) {
      throw new BatfishException("Exiting due to conversion error(s)");
   }
   if (!_settings.getNoOutput()) {
      _logger
            .info("\n*** SERIALIZING VENDOR-INDEPENDENT CONFIGURATION STRUCTURES ***\n");
      resetTimer();
      new File(outputPath).mkdirs();
      for (Entry<String, Configuration> entry : configurations.entrySet()) {
         String name = entry.getKey();
         Configuration c = entry.getValue();
         Path currentOutputPath = Paths.get(outputPath, name);
         _logger.info("Serializing: \"" + name + "\" ==> \""
               + currentOutputPath.toString() + "\"");
         serializeObject(c, currentOutputPath.toFile());
         _logger.info(" ...OK\n");
      }
      printElapsedTime();
   }
}
/**
 * Convenience overload: loads the vendor-independent configurations from
 * the given path and serializes them to the output directory.
 *
 * @param vendorConfigPath directory holding serialized vendor configs
 * @param outputPath       directory to write independent configs into
 */
private void serializeIndependentConfigs(String vendorConfigPath,
      String outputPath) {
   serializeIndependentConfigs(getConfigurations(vendorConfigPath),
         outputPath);
}
/**
 * Parses the network configuration files in the test rig, optionally
 * attaches node-role mappings, and serializes each vendor configuration
 * to its own file in the output directory.
 *
 * Improvement: the serialization loop iterates entrySet() instead of
 * keySet()+get(), matching the role-assignment loop above and avoiding a
 * second lookup per node.
 *
 * @param testRigPath path to the test rig containing configurations
 * @param outputPath  directory to write serialized configurations into
 * @throws BatfishException on parse errors, or if a role is assigned to a
 *                          node that has no configuration
 */
private void serializeNetworkConfigs(String testRigPath, String outputPath) {
   Map<File, String> configurationData = readConfigurationFiles(testRigPath,
         BfConsts.RELPATH_CONFIGURATIONS_DIR);
   Map<String, VendorConfiguration> vendorConfigurations = parseVendorConfigurations(configurationData);
   if (vendorConfigurations == null) {
      throw new BatfishException("Exiting due to parser errors");
   }
   String nodeRolesPath = _settings.getNodeRolesPath();
   if (nodeRolesPath != null) {
      NodeRoleMap nodeRoles = parseNodeRoles(testRigPath);
      for (Entry<String, RoleSet> nodeRolesEntry : nodeRoles.entrySet()) {
         String hostname = nodeRolesEntry.getKey();
         VendorConfiguration config = vendorConfigurations.get(hostname);
         if (config == null) {
            throw new BatfishException(
                  "role set assigned to non-existent node: \"" + hostname
                        + "\"");
         }
         RoleSet roles = nodeRolesEntry.getValue();
         config.setRoles(roles);
      }
      if (!_settings.getNoOutput()) {
         _logger.info("Serializing node-roles mappings: \"" + nodeRolesPath
               + "\"...");
         serializeObject(nodeRoles, new File(nodeRolesPath));
         _logger.info("OK\n");
      }
   }
   if (!_settings.getNoOutput()) {
      _logger
            .info("\n*** SERIALIZING VENDOR CONFIGURATION STRUCTURES ***\n");
      resetTimer();
      new File(outputPath).mkdirs();
      for (Entry<String, VendorConfiguration> entry : vendorConfigurations
            .entrySet()) {
         String name = entry.getKey();
         VendorConfiguration vc = entry.getValue();
         Path currentOutputPath = Paths.get(outputPath, name);
         _logger.debug("Serializing: \"" + name + "\" ==> \""
               + currentOutputPath.toString() + "\"...");
         serializeObject(vc, currentOutputPath.toFile());
         _logger.debug("OK\n");
      }
      printElapsedTime();
   }
}
/**
 * Serializes an object to the given file, either as XStream XML (when
 * -serializeToText is set) or as native Java serialization.
 *
 * Fix: the original leaked the output streams when writeObject (or stream
 * construction) threw — nothing was closed on the error path. Streams are
 * now closed in a finally block; closing the ObjectOutputStream also
 * closes the underlying FileOutputStream.
 *
 * @param object     object to serialize
 * @param outputFile destination file
 * @throws BatfishException if any I/O error occurs
 */
private void serializeObject(Object object, File outputFile) {
   try {
      FileOutputStream fos = new FileOutputStream(outputFile);
      ObjectOutputStream oos = null;
      try {
         if (_settings.getSerializeToText()) {
            XStream xstream = new XStream(new DomDriver("UTF-8"));
            oos = xstream.createObjectOutputStream(fos);
         }
         else {
            oos = new ObjectOutputStream(fos);
         }
         oos.writeObject(object);
      }
      finally {
         if (oos != null) {
            // closes fos as well
            oos.close();
         }
         else {
            // oos construction failed; close the file stream directly
            fos.close();
         }
      }
   }
   catch (IOException e) {
      throw new BatfishException(
            "Failed to serialize object to output file: "
                  + outputFile.toString(), e);
   }
}
/**
 * Serializes every kind of vendor configuration present in the test rig:
 * network device configs and/or AWS VPC configs.
 *
 * @param testRigPath path to the test rig
 * @param outputPath  directory to write serialized structures into
 * @throws BatfishException if the test rig contains neither kind
 */
private void serializeVendorConfigs(String testRigPath, String outputPath) {
   File networkConfigsPath = Paths.get(testRigPath,
         BfConsts.RELPATH_CONFIGURATIONS_DIR).toFile();
   File awsVpcConfigsPath = Paths.get(testRigPath,
         BfConsts.RELPATH_AWS_VPC_CONFIGS_DIR).toFile();
   boolean foundAny = false;
   if (networkConfigsPath.exists()) {
      serializeNetworkConfigs(testRigPath, outputPath);
      foundAny = true;
   }
   if (awsVpcConfigsPath.exists()) {
      serializeAwsVpcConfigs(testRigPath, outputPath);
      foundAny = true;
   }
   if (!foundAny) {
      throw new BatfishException("No valid configurations found");
   }
}
// Records whether the batch terminated with an exception, for later
// reporting. NOTE(review): name violates lowerCamelCase convention
// (should be setTerminatedWithException), but it is public — renaming
// would break external callers.
public void SetTerminatedWithException(boolean terminatedWithException) {
   _terminatedWithException = terminatedWithException;
}
/**
 * Builds a Z3 synthesizer over ACL logic for the given configurations,
 * logging any warnings produced during synthesis.
 *
 * @param configurations node name to configuration map
 * @return the constructed {@link Synthesizer}
 */
private Synthesizer synthesizeAcls(Map<String, Configuration> configurations) {
   _logger.info("\n*** GENERATING Z3 LOGIC ***\n");
   resetTimer();
   _logger.info("Synthesizing Z3 ACL logic...");
   Synthesizer synthesizer = new Synthesizer(configurations,
         _settings.getSimplify());
   List<String> warnings = synthesizer.getWarnings();
   if (warnings.isEmpty()) {
      _logger.info("OK\n");
   }
   else {
      for (String warning : warnings) {
         _logger.warn(warning);
      }
   }
   printElapsedTime();
   return synthesizer;
}
/**
 * Deserializes the precomputed data plane and builds a Z3 synthesizer
 * over it together with the given configurations, logging any warnings
 * produced during synthesis.
 *
 * @param configurations node name to configuration map
 * @param dataPlanePath  file containing the serialized {@link DataPlane}
 * @return the constructed {@link Synthesizer}
 */
private Synthesizer synthesizeDataPlane(
      Map<String, Configuration> configurations, File dataPlanePath) {
   _logger.info("\n*** GENERATING Z3 LOGIC ***\n");
   resetTimer();
   _logger.info("Deserializing data plane: \"" + dataPlanePath.toString()
         + "\"...");
   DataPlane dataPlane = (DataPlane) deserializeObject(dataPlanePath);
   _logger.info("OK\n");
   _logger.info("Synthesizing Z3 logic...");
   Synthesizer synthesizer = new Synthesizer(configurations, dataPlane,
         _settings.getSimplify());
   List<String> warnings = synthesizer.getWarnings();
   if (warnings.isEmpty()) {
      _logger.info("OK\n");
   }
   else {
      for (String warning : warnings) {
         _logger.warn(warning);
      }
   }
   printElapsedTime();
   return synthesizer;
}
/**
 * Infers a topology from interface subnet information: two interfaces are
 * considered adjacent (in both directions) when they are active,
 * non-loopback, and share the same network prefix shorter than /32.
 *
 * Fix: the prefix is read once via the already-bound {@code iface}
 * variable instead of a redundant second {@code e2.getValue()} call.
 *
 * @param configurations node name to configuration map
 * @return the synthesized set of directed edges
 */
private EdgeSet synthesizeTopology(Map<String, Configuration> configurations) {
   _logger
         .info("\n*** SYNTHESIZING TOPOLOGY FROM INTERFACE SUBNET INFORMATION ***\n");
   resetTimer();
   EdgeSet edges = new EdgeSet();
   // Bucket interfaces by the network they sit on.
   Map<Prefix, Set<NodeInterfacePair>> prefixInterfaces = new HashMap<Prefix, Set<NodeInterfacePair>>();
   for (Entry<String, Configuration> e1 : configurations.entrySet()) {
      String nodeName = e1.getKey();
      Configuration node = e1.getValue();
      for (Entry<String, Interface> e2 : node.getInterfaces().entrySet()) {
         Interface iface = e2.getValue();
         String ifaceName = e2.getKey();
         Prefix prefix = iface.getPrefix();
         // /32s are point addresses, not shared segments; skip them.
         if (!iface.isLoopback(node.getVendor()) && iface.getActive()
               && prefix != null && prefix.getPrefixLength() < 32) {
            Prefix network = new Prefix(prefix.getNetworkAddress(),
                  prefix.getPrefixLength());
            NodeInterfacePair pair = new NodeInterfacePair(nodeName,
                  ifaceName);
            Set<NodeInterfacePair> interfaceBucket = prefixInterfaces
                  .get(network);
            if (interfaceBucket == null) {
               interfaceBucket = new HashSet<NodeInterfacePair>();
               prefixInterfaces.put(network, interfaceBucket);
            }
            interfaceBucket.add(pair);
         }
      }
   }
   // Fully connect every bucket (both directions, no self-edges).
   for (Set<NodeInterfacePair> bucket : prefixInterfaces.values()) {
      for (NodeInterfacePair p1 : bucket) {
         for (NodeInterfacePair p2 : bucket) {
            if (!p1.equals(p2)) {
               Edge edge = new Edge(p1, p2);
               edges.add(edge);
            }
         }
      }
   }
   return edges;
}
/**
 * Serializes the computed BGP advertisements to the given path, creating
 * parent directories as needed.
 *
 * @param writeAdvertsPath destination file for the advertisements
 * @param envSettings      environment from which advertisements are read
 */
private void writeBgpAdvertisements(String writeAdvertsPath,
      EnvironmentSettings envSettings) {
   File advertsFile = new File(writeAdvertsPath);
   File parentDir = advertsFile.getParentFile();
   if (parentDir != null) {
      parentDir.mkdirs();
   }
   AdvertisementSet adverts = getAdvertisements(envSettings);
   _logger.info("Serializing: BGP advertisements => \"" + writeAdvertsPath
         + "\"...");
   serializeObject(adverts, advertsFile);
   _logger.info("OK\n");
}
/**
 * Appends one "node|interface" line per flow sink to the
 * SetFlowSinkInterface fact bin.
 *
 * @param flowSinks  interfaces designated as flow sinks
 * @param cpFactBins control-plane fact bins, keyed by predicate name
 */
private void writeFlowSinkFacts(InterfaceSet flowSinks,
      Map<String, StringBuilder> cpFactBins) {
   StringBuilder sb = cpFactBins.get("SetFlowSinkInterface");
   for (NodeInterfacePair flowSink : flowSinks) {
      sb.append(flowSink.getHostname()).append("|")
            .append(flowSink.getInterface()).append("\n");
   }
}
/**
 * Serializes the computed IBGP neighbor topology to the given path,
 * creating parent directories as needed.
 *
 * @param ibgpTopologyPath destination file for the IBGP topology
 */
private void writeIbgpNeighbors(String ibgpTopologyPath) {
   File ibgpTopologyFile = new File(ibgpTopologyPath);
   File parentDir = ibgpTopologyFile.getParentFile();
   if (parentDir != null) {
      parentDir.mkdirs();
   }
   IbgpTopology topology = getIbgpNeighbors();
   _logger.info("Serializing: IBGP neighbors => \"" + ibgpTopologyPath
         + "\"...");
   serializeObject(topology, ibgpTopologyFile);
   _logger.info("OK\n");
}
/**
 * Synthesizes the topology from loaded configurations and prints it as a
 * JSON document of the form {"topology": {"edges": [...]}}.
 *
 * @throws BatfishException if JSON construction fails
 */
private void writeJsonTopology() {
   try {
      Map<String, Configuration> configs = loadConfigurations();
      JSONArray jEdges = new JSONArray();
      for (Edge textEdge : synthesizeTopology(configs)) {
         Interface interface1 = configs.get(textEdge.getNode1())
               .getInterfaces().get(textEdge.getInt1());
         Interface interface2 = configs.get(textEdge.getNode2())
               .getInterfaces().get(textEdge.getInt2());
         JSONObject jEdge = new JSONObject();
         jEdge.put("interface1", interface1.toJSONObject());
         jEdge.put("interface2", interface2.toJSONObject());
         jEdges.put(jEdge);
      }
      JSONObject topology = new JSONObject();
      topology.put("edges", jEdges);
      JSONObject master = new JSONObject();
      master.put("topology", topology);
      // Pretty-print with an indent of 3, matching the original output.
      _logger.output(master.toString(3));
   }
   catch (JSONException e) {
      throw new BatfishException("Failed to synthesize JSON topology", e);
   }
}
/**
 * Generates the nxtnet input program: an output_symbols declaration
 * followed by one Prolog-style fact per data row of each input predicate.
 *
 * Fix: the original only emitted the closing "]).\n" of the
 * output_symbols clause inside the last loop iteration, producing an
 * unterminated clause when {@code outputSymbols} was empty. The separator
 * and terminator are now written unconditionally.
 *
 * @param outputSymbols   predicates nxtnet should produce
 * @param inputFacts      predicate name to tab-dumped fact contents
 *                        (first line of each contents is a header row)
 * @param nxtnetInputFile path of the input file to write
 * @param envSettings     environment whose relations are being computed
 */
private void writeNxtnetInput(Set<String> outputSymbols,
      Map<String, String> inputFacts, String nxtnetInputFile,
      EnvironmentSettings envSettings) {
   checkComputeNxtnetRelations(envSettings);
   StringBuilder sb = new StringBuilder();
   sb.append("output_symbols([");
   List<String> outputSymbolsList = new ArrayList<String>(outputSymbols);
   for (int i = 0; i < outputSymbolsList.size(); i++) {
      if (i > 0) {
         sb.append(",");
      }
      sb.append("'" + outputSymbolsList.get(i) + "'");
   }
   sb.append("]).\n");
   String lineDelimiter = Pattern.quote("|");
   for (Entry<String, String> e : inputFacts.entrySet()) {
      String predicateName = e.getKey();
      String contents = e.getValue();
      LBValueTypeList valueTypes = _predicateInfo
            .getPredicateValueTypes(predicateName);
      String[] lines = contents.split("\n");
      // Start at 1: line 0 is the header row of the fact dump.
      for (int i = 1; i < lines.length; i++) {
         sb.append("'" + predicateName + "'(");
         String line = lines[i];
         String[] parts = line.split(lineDelimiter);
         for (int j = 0; j < parts.length; j++) {
            String part = parts[j];
            boolean isNum;
            LBValueType currentValueType = valueTypes.get(j);
            // Numeric column types are emitted bare; string-like types
            // are single-quoted for the Prolog reader.
            switch (currentValueType) {
            case ENTITY_INDEX_BGP_ADVERTISEMENT:
            case ENTITY_INDEX_FLOW:
            case ENTITY_INDEX_INT:
            case ENTITY_INDEX_NETWORK:
            case ENTITY_INDEX_ROUTE:
            case ENTITY_REF_AUTONOMOUS_SYSTEM:
            case ENTITY_REF_INT:
            case ENTITY_REF_IP:
            case FLOAT:
            case INT:
               isNum = true;
               break;
            case ENTITY_REF_ADVERTISEMENT_TYPE:
            case ENTITY_REF_AS_PATH:
            case ENTITY_REF_FLOW_TAG:
            case ENTITY_REF_INTERFACE:
            case ENTITY_REF_NODE:
            case ENTITY_REF_ORIGIN_TYPE:
            case ENTITY_REF_POLICY_MAP:
            case ENTITY_REF_ROUTING_PROTOCOL:
            case ENTITY_REF_STRING:
            case STRING:
               isNum = false;
               break;
            default:
               throw new BatfishException("invalid value type");
            }
            if (!isNum) {
               sb.append("'" + part + "'");
            }
            else {
               sb.append(part);
            }
            if (j < parts.length - 1) {
               sb.append(",");
            }
            else {
               sb.append(").\n");
            }
         }
      }
   }
   String output = sb.toString();
   Util.writeFile(nxtnetInputFile, output);
}
/**
 * Dumps precomputed-route facts (restricted to the route and network
 * predicates) into the environment's traffic facts directory for nxtnet.
 *
 * @param envSettings environment providing the precomputed routes path
 *                    and the traffic facts directory
 */
private void writeNxtnetPrecomputedRoutes(EnvironmentSettings envSettings) {
   Map<String, StringBuilder> prFactBins = new HashMap<String, StringBuilder>();
   initControlPlaneFactBins(prFactBins, true);
   // Keep only the two predicates relevant to precomputed routes.
   Set<String> prPredicates = new HashSet<String>();
   prPredicates.add(PRECOMPUTED_ROUTES_PREDICATE_NAME);
   prPredicates.add(NETWORKS_PREDICATE_NAME);
   prFactBins.keySet().retainAll(prPredicates);
   String precomputedRoutesPath = envSettings.getPrecomputedRoutesPath();
   populatePrecomputedRoutes(
         Collections.singletonList(precomputedRoutesPath), prFactBins);
   dumpFacts(prFactBins, envSettings.getTrafficFactsDir());
}
/**
 * Serializes the computed routes to the given path, creating parent
 * directories as needed.
 *
 * @param writeRoutesPath destination file for the routes
 * @param envSettings     environment from which routes are read
 */
private void writeRoutes(String writeRoutesPath,
      EnvironmentSettings envSettings) {
   File routesFile = new File(writeRoutesPath);
   File parentDir = routesFile.getParentFile();
   if (parentDir != null) {
      parentDir.mkdirs();
   }
   RouteSet routes = getRoutes(envSettings);
   _logger.info("Serializing: routes => \"" + writeRoutesPath + "\"...");
   serializeObject(routes, routesFile);
   _logger.info("OK\n");
}
/**
 * Synthesizes the topology from loaded configurations and prints it in
 * the textual topology format (header line, then one
 * "node1:int1,node2:int2" line per edge).
 */
private void writeSynthesizedTopology() {
   Map<String, Configuration> configs = loadConfigurations();
   _logger.output(BatfishTopologyCombinedParser.HEADER + "\n");
   for (Edge edge : synthesizeTopology(configs)) {
      _logger.output(edge.getNode1() + ":" + edge.getInt1() + ","
            + edge.getNode2() + ":" + edge.getInt2() + "\n");
   }
   printElapsedTime();
}
/**
 * Extracts topology facts from the given topology and appends them to the
 * appropriate fact bins.
 *
 * @param topology the network topology
 * @param factBins fact bins, keyed by predicate name
 */
private void writeTopologyFacts(Topology topology,
      Map<String, StringBuilder> factBins) {
   new TopologyFactExtractor(topology).writeFacts(factBins);
}
}
|
package com.fincatto.documentofiscal.mdfe3.classes;
import com.fincatto.documentofiscal.DFAmbiente;
import com.fincatto.documentofiscal.DFUnidadeFederativa;
import java.util.Arrays;
/**
 * MDF-e 3.0 authorizer endpoints. SVRS (Sefaz Virtual do Rio Grande do
 * Sul) is the only authorizer and serves as the fallback for every UF.
 */
public enum MDFAutorizador3 {
    RS {
        @Override
        public String getMDFeRecepcao(DFAmbiente ambiente) {
            return svrs(ambiente, "MDFerecepcao/MDFeRecepcao");
        }
        @Override
        public String getMDFeRetornoRecepcao(DFAmbiente ambiente) {
            return svrs(ambiente, "MDFeRetRecepcao/MDFeRetRecepcao");
        }
        @Override
        public String getMDFeRecepcaoEvento(DFAmbiente ambiente) {
            return svrs(ambiente, "MDFeRecepcaoEvento/MDFeRecepcaoEvento");
        }
        @Override
        public String getMDFeStatusServico(DFAmbiente ambiente) {
            return svrs(ambiente, "MDFeStatusServico/MDFeStatusServico");
        }
        @Override
        public String getMDFeConsulta(DFAmbiente ambiente) {
            return svrs(ambiente, "MDFeConsulta/MDFeConsulta");
        }
        @Override
        public String getMDFeConsNaoEnc(DFAmbiente ambiente) {
            return svrs(ambiente, "MDFeConsNaoEnc/MDFeConsNaoEnc");
        }
        @Override
        public DFUnidadeFederativa[] getUFs() {
            return new DFUnidadeFederativa[]{DFUnidadeFederativa.RS};
        }
    };

    /**
     * Builds an SVRS web-service URL for the given environment and service
     * path (the duplicated homologacao/producao host selection lives here).
     */
    private static String svrs(final DFAmbiente ambiente, final String servico) {
        final String base = DFAmbiente.HOMOLOGACAO.equals(ambiente)
                ? "https://mdfe-homologacao.svrs.rs.gov.br/ws/"
                : "https://mdfe.svrs.rs.gov.br/ws/";
        return base + servico + ".asmx";
    }

    public abstract String getMDFeRecepcao(final DFAmbiente ambiente);
    public abstract String getMDFeRetornoRecepcao(final DFAmbiente ambiente);
    public abstract String getMDFeRecepcaoEvento(final DFAmbiente ambiente);
    public abstract String getMDFeStatusServico(final DFAmbiente ambiente);
    public abstract String getMDFeConsulta(final DFAmbiente ambiente);
    public abstract String getMDFeConsNaoEnc(final DFAmbiente ambiente);
    public abstract DFUnidadeFederativa[] getUFs();

    /**
     * Returns the authorizer responsible for the given UF; falls back to
     * RS (SVRS) when no authorizer explicitly lists it.
     */
    public static MDFAutorizador3 valueOfCodigoUF(final DFUnidadeFederativa uf) {
        for (final MDFAutorizador3 candidato : values()) {
            if (Arrays.asList(candidato.getUFs()).contains(uf)) {
                return candidato;
            }
        }
        return RS;
    }
}
|
package com.sun.star.wizards.ui;
import com.sun.star.awt.XTextComponent;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.uno.Exception;
import com.sun.star.wizards.common.FileAccess;
import com.sun.star.wizards.common.SystemDialog;
/**
 * A reusable "path selection" widget for wizard dialogs: a label, a text
 * box holding the chosen path, and a "..." button that opens a system
 * file picker. Supports save/load transfer modes and file/folder dialog
 * types, though only SAVE+FILE is currently implemented.
 */
public class PathSelection {
    UnoDialog2 CurUnoDialog;
    XMultiServiceFactory xMSF;
    // Dialog type: DialogTypes.FOLDER or DialogTypes.FILE
    int iDialogType;
    // Transfer mode: TransferMode.SAVE or TransferMode.LOAD
    int iTransferMode;
    public String sDefaultDirectory = "";
    public String sDefaultName = "";
    public String sDefaultFilter = "";
    // Set once the user has opened the file-picker via the "..." button.
    public boolean usedPathPicker = false;
    // Optional listener notified whenever the text box content changes.
    public XPathSelectionListener xAction;
    public XTextComponent xSaveTextBox;
    // NOTE(review): both constants are 1 and neither is referenced in this
    // class — presumably command/control ids kept for callers; verify.
    private final int CMDSELECTPATH = 1;
    private final int TXTSAVEPATH = 1;
    // Dialog type constants (folder picker vs. file picker).
    public static class DialogTypes {
        public static final int FOLDER = 0;
        public static final int FILE = 1;
    }
    // Transfer mode constants (saving vs. loading).
    public static class TransferMode {
        public static final int SAVE = 0;
        public static final int LOAD = 1;
    }
    public PathSelection(XMultiServiceFactory xMSF, UnoDialog2 CurUnoDialog, int TransferMode, int DialogType) {
        this.CurUnoDialog = CurUnoDialog;
        this.xMSF = xMSF;
        this.iDialogType = DialogType;
        this.iTransferMode = TransferMode;
    }
    /**
     * Inserts the three controls (label, text field, browse button) into
     * the owning dialog. The property-name and value arrays below are
     * positional pairs consumed by the UNO toolkit — keep them aligned.
     * Consumes tab indices CurTabIndex..CurTabIndex+2.
     */
    public void insert(int DialogStep, int XPos, int YPos, int Width, short CurTabIndex, String LabelText, boolean Enabled, String TxtHelpURL, String BtnHelpURL) {
        CurUnoDialog.insertControlModel("com.sun.star.awt.UnoControlFixedTextModel", "lblSaveAs", new String[] { "Enabled", "Height", "Label", "PositionX", "PositionY", "Step", "TabIndex", "Width" }, new Object[] { new Boolean(Enabled), new Integer(8), LabelText, new Integer(XPos), new Integer(YPos), new Integer(DialogStep), new Short(CurTabIndex), new Integer(Width)});
        xSaveTextBox = CurUnoDialog.insertTextField("txtSavePath", "callXPathSelectionListener", this, new String[] { "Enabled", "Height", "HelpURL", "PositionX", "PositionY", "Step", "TabIndex", "Width" }, new Object[] { new Boolean(Enabled), new Integer(12), TxtHelpURL, new Integer(XPos), new Integer(YPos + 10), new Integer(DialogStep), new Short((short)(CurTabIndex + 1)), new Integer(Width - 26)});
        CurUnoDialog.insertButton("cmdSelectPath", "triggerPathPicker", this, new String[] { "Enabled", "Height", "HelpURL", "Label", "PositionX", "PositionY", "Step", "TabIndex", "Width" }, new Object[] { new Boolean(Enabled), new Integer(14), BtnHelpURL, "...", new Integer(XPos + Width - 16), new Integer(YPos + 9), new Integer(DialogStep), new Short((short)(CurTabIndex + 2)), new Integer(16)});
    }
    // Registers the listener notified from callXPathSelectionListener().
    public void addSelectionListener(XPathSelectionListener xAction) {
        this.xAction = xAction;
    }
    // Returns the path currently shown in the text box.
    public String getSelectedPath() {
        return xSaveTextBox.getText();
    }
    /**
     * Seeds the text box with sDefaultDirectory/sDefaultName, converted to
     * a displayable path. Errors are logged and otherwise ignored.
     */
    public void initializePath() {
        try {
            FileAccess myFA = new FileAccess(xMSF);
            xSaveTextBox.setText(myFA.getPath(sDefaultDirectory + "/" + sDefaultName, null));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    /**
     * Handler for the "..." button: opens the appropriate system dialog.
     * Only SAVE+FILE is implemented; on success it updates the text box
     * and remembers directory/name as the new defaults.
     * NOTE(review): usedPathPicker is set before the dialog returns, so it
     * stays true even if the user cancels — confirm that is intended.
     */
    public void triggerPathPicker() {
        try {
            switch (iTransferMode) {
                case TransferMode.SAVE :
                    switch (iDialogType) {
                        case DialogTypes.FOLDER :
                            //TODO: write code for picking a folder for saving
                            break;
                        case DialogTypes.FILE :
                            usedPathPicker = true;
                            SystemDialog myFilePickerDialog = SystemDialog.createStoreDialog(xMSF);
                            myFilePickerDialog.callStoreDialog(sDefaultDirectory, sDefaultName, sDefaultFilter);
                            String sStorePath = myFilePickerDialog.sStorePath;
                            if (sStorePath != null) {
                                FileAccess myFA = new FileAccess(xMSF);
                                xSaveTextBox.setText(myFA.getPath(sStorePath,null));
                                sDefaultDirectory = FileAccess.getParentDir(sStorePath);
                                sDefaultName = myFA.getFilename(sStorePath);
                            }
                            break;
                        default :
                            break;
                    }
                    break;
                case TransferMode.LOAD :
                    switch (iDialogType) {
                        case DialogTypes.FOLDER :
                            //TODO: write code for picking a folder for loading
                            break;
                        case DialogTypes.FILE :
                            //TODO: write code for picking a file for loading
                            break;
                        default :
                            break;
                    }
                    break;
                default :
                    break;
            }
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
    // Forwards text-box changes to the registered listener, if any.
    public void callXPathSelectionListener() {
        if (xAction != null)
            xAction.validatePath();
    }
}
|
package com.github.minaasham.offheap.largecollections;
import com.github.minaasham.offheap.largecollections.serialization.FixedSizeObjectSerializer;
import com.github.minaasham.offheap.largecollections.serialization.MemoryReader;
import com.github.minaasham.offheap.largecollections.serialization.ObjectSerializer;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static java.lang.ThreadLocal.withInitial;
/**
* LargeHashMap, an open address hash map that can handle a large number of entries
* It utilizes the {@link sun.misc.Unsafe} object to allocate memory, hence it's not limited by the GC
*
* @param <K> The key type, cannot be null
* @param <V> The value type, cannot be null
*/
@AllArgsConstructor(access = AccessLevel.PRIVATE)
public final class LargeHashMap<K, V> implements LargeMap<K, V> {
/**
 * Default load factor for the hash map
 */
private static final double DEFAULT_LOAD_FACTOR = 0.65;
/**
 * Default capacity for the hash map
 */
private static final int DEFAULT_CAPACITY = 512;
/**
 * The lock used to guarantee thread safety in map operations
 */
private final ReentrantReadWriteLock lock;
/**
 * The memory reader passed to key and value serializer, it's reset every time
 */
private final ThreadLocal<UnsafeMemoryReader> memoryReader;
/**
 * The memory writer passed to key and value serializer, it's reset every time
 */
private final ThreadLocal<UnsafeMemoryWriter> memoryWriter;
/**
 * The key object serializer
 */
private final ObjectSerializer<K> keySerializer;
/**
 * Does the key have a fixed size?
 */
private final boolean keyFixedSize;
/**
 * The key's header size; 0 for fixed and Integer.BYTES for variable
 */
private final int keyHeaderSize;
/**
 * The value object serializer
 */
private final ObjectSerializer<V> valueSerializer;
/**
 * Does the value have a fixed size?
 */
private final boolean valueFixedSize;
/**
 * The value's header size; 0 for fixed and Integer.BYTES for variable
 */
private final int valueHeaderSize;
/**
 * The load factor for the hash map
 */
private final double loadFactor;
/**
 * The address to the start of memory allocated for entry pointers
 */
private long entryPointerAddresses;
/**
 * The current capacity of the hash map
 */
private long capacity;
/**
 * The current size of the hash map
 */
private long size;
/**
 * The number of modifications that happened to the hash map;
 * this serves as a fail-fast check for the map's iterator
 */
private int modifications;
/**
 * If the map was closed and disposed of its resources
 */
private boolean closed;
/**
 * Creates a {@link LargeHashMap} with the default load factor and the
 * default initial capacity.
 *
 * @param keySerializer   The key serializer
 * @param valueSerializer The value serializer
 * @param <K>             The key type
 * @param <V>             The value type
 * @return A {@link LargeHashMap} object
 */
public static <K, V> LargeHashMap<K, V> of(ObjectSerializer<K> keySerializer, ObjectSerializer<V> valueSerializer) {
    return of(keySerializer, valueSerializer, DEFAULT_LOAD_FACTOR, DEFAULT_CAPACITY);
}
/**
 * Creates a {@link LargeHashMap} with the given load factor and the
 * default initial capacity.
 *
 * @param keySerializer   The key serializer
 * @param valueSerializer The value serializer
 * @param loadFactor      The load factor
 * @param <K>             The key type
 * @param <V>             The value type
 * @return A {@link LargeHashMap} object
 */
public static <K, V> LargeHashMap<K, V> of(ObjectSerializer<K> keySerializer, ObjectSerializer<V> valueSerializer, double loadFactor) {
    return of(keySerializer, valueSerializer, loadFactor, DEFAULT_CAPACITY);
}
/**
 * Creates a {@link LargeHashMap} with the default load factor and the
 * given initial capacity.
 *
 * @param keySerializer   The key serializer
 * @param valueSerializer The value serializer
 * @param capacity        The initial capacity
 * @param <K>             The key type
 * @param <V>             The value type
 * @return A {@link LargeHashMap} object
 */
public static <K, V> LargeHashMap<K, V> of(ObjectSerializer<K> keySerializer, ObjectSerializer<V> valueSerializer, long capacity) {
    return of(keySerializer, valueSerializer, DEFAULT_LOAD_FACTOR, capacity);
}
/**
 * Factory method for creating a {@link LargeHashMap} object
 *
 * @param keySerializer The key serializer
 * @param valueSerializer The value serializer
 * @param loadFactor The load factor, allowed values are strictly between 0 and 1 (both exclusive)
 * @param capacity The initial capacity, must be a least 1
 * @param <K> The key type
 * @param <V> The value type
 * @return A {@link LargeHashMap} object
 */
public static <K, V> LargeHashMap<K, V> of(@NonNull ObjectSerializer<K> keySerializer, @NonNull ObjectSerializer<V> valueSerializer, double loadFactor, long capacity) {
    // A load factor of exactly 1 is rejected: with open addressing the
    // table could fill completely before a resize is triggered.
    if (loadFactor <= 0 || 1 <= loadFactor) throw new IllegalArgumentException("Load factor must be bigger than 0 and less than 1");
    if (capacity <= 0) throw new IllegalArgumentException("Initial capacity must be at least 1");
    // Fixed-size serializers need no per-entry length header.
    boolean keyFixedSize = keySerializer instanceof FixedSizeObjectSerializer;
    boolean valueFixedSize = valueSerializer instanceof FixedSizeObjectSerializer;
    return new LargeHashMap<>(
        new ReentrantReadWriteLock(),
        withInitial(UnsafeMemoryReader::new),
        withInitial(UnsafeMemoryWriter::new),
        keySerializer,
        keyFixedSize,
        keyFixedSize ? 0 : Integer.BYTES,
        valueSerializer,
        valueFixedSize,
        valueFixedSize ? 0 : Integer.BYTES,
        loadFactor,
        UnsafeUtils.allocate(capacity * Long.BYTES),
        capacity,
        0,
        0,
        false
    );
}
/**
 * Looks up a key under the read lock.
 *
 * @param key The key to lookup
 * @return The value associated with the key, or null if absent
 */
@Override
public V get(@NonNull K key) {
    lock.readLock().lock();
    try {
        throwIfClosed();
        long slot = entryPointerAddresses
                + findOffset(key, capacity, entryPointerAddresses);
        long entryPointer = UnsafeUtils.getLong(slot);
        if (entryPointer == 0) {
            // Empty slot: the key is not present.
            return null;
        }
        return readValue(entryPointer);
    } finally {
        lock.readLock().unlock();
    }
}
/**
 * Puts the key and value in the map
 *
 * <p>Each entry is a single off-heap allocation laid out as:
 * [key size header (int, only when the key serializer is variable-size)]
 * [key bytes]
 * [value size header (int, only when the value serializer is variable-size)]
 * [value bytes].
 *
 * @param key The key to insert in the map
 * @param value The value to insert in the map
 * @return The old value related to that key, or {@code null} if the key was absent
 */
@Override
public V put(@NonNull K key, @NonNull V value) {
    lock.writeLock().lock();
    try {
        throwIfClosed();
        resizeIfRequired();
        modifications++;
        long offset = findOffset(key, capacity, entryPointerAddresses);
        long entryPointer = UnsafeUtils.getLong(entryPointerAddresses + offset);
        V previous = null;
        if (entryPointer != 0) {
            // Key already present: capture the old value, then free the old entry block
            // before allocating the replacement.
            previous = readValue(entryPointer);
            UnsafeUtils.free(entryPointer);
        } else {
            size++;
        }
        int keySize = keySerializer.sizeInBytes(key);
        int valueSize = valueSerializer.sizeInBytes(value);
        // One allocation covers both headers and both payloads.
        entryPointer = UnsafeUtils.allocate(keyHeaderSize + keySize + valueHeaderSize + valueSize);
        UnsafeUtils.putLong(entryPointerAddresses + offset, entryPointer);
        // Size headers are skipped for fixed-size serializers (their header size is 0).
        if (!keyFixedSize) UnsafeUtils.putInt(entryPointer, keySize);
        keySerializer.serialize(memoryWriter.get().resetTo(entryPointer + keyHeaderSize, keySize), key);
        if (!valueFixedSize) UnsafeUtils.putInt(entryPointer + keyHeaderSize + keySize, valueSize);
        valueSerializer.serialize(memoryWriter.get().resetTo(entryPointer + keyHeaderSize + keySize + valueHeaderSize, valueSize), value);
        return previous;
    } finally {
        lock.writeLock().unlock();
    }
}
/**
* Removes the key from the map if it exists
*
* @param key The key to remove from the map
* @return The value of the key
*/
@Override
public V remove(@NonNull K key) {
lock.writeLock().lock();
try {
throwIfClosed();
resizeIfRequired();
long offset = findOffset(key, capacity, entryPointerAddresses);
long entryPointer = UnsafeUtils.getLong(entryPointerAddresses + offset);
if (entryPointer == 0) return null;
modifications++;
size
V value = readValue(entryPointer);
UnsafeUtils.free(entryPointer);
long index = offset / Long.BYTES;
long bubbleUpIndex = index;
while (true) {
long entryIndex;
do {
bubbleUpIndex = (bubbleUpIndex + 1) % capacity;
entryPointer = UnsafeUtils.getLong(entryPointerAddresses + bubbleUpIndex * Long.BYTES);
if (entryPointer == 0) {
UnsafeUtils.putLong(entryPointerAddresses + index * Long.BYTES, 0);
return value;
}
entryIndex = offset(readKey(entryPointer), capacity);
} while (index <= bubbleUpIndex ? index < entryIndex && entryIndex <= bubbleUpIndex : index < entryIndex || entryIndex <= bubbleUpIndex);
UnsafeUtils.putLong(entryPointerAddresses + index * Long.BYTES, entryPointer);
index = bubbleUpIndex;
}
} finally {
lock.writeLock().unlock();
}
}
/**
 * Clear the map from all keys and values, freeing every entry's off-heap allocation.
 */
@Override
public void clear() {
    lock.writeLock().lock();
    try {
        throwIfClosed();
        // Use a long counter: capacity is a long, so an int counter would overflow
        // (and i * Long.BYTES would overflow int arithmetic) on very large maps.
        for (long i = 0; i < capacity; i++) {
            long entryPointerAddress = entryPointerAddresses + i * Long.BYTES;
            long entryPointer = UnsafeUtils.getLong(entryPointerAddress);
            if (entryPointer != 0) {
                modifications++;
                UnsafeUtils.free(entryPointer);
                UnsafeUtils.putLong(entryPointerAddress, 0);
            }
        }
        size = 0;
    } finally {
        lock.writeLock().unlock();
    }
}
/**
 * Returns the number of entries currently stored in the map.
 *
 * @return The size of the map
 */
@Override
public long size() {
    lock.readLock().lock();
    try {
        throwIfClosed();
        return size;
    } finally {
        lock.readLock().unlock();
    }
}
/**
 * Creates a fail-fast iterator over the map's entries.
 *
 * @return The map's iterator
 */
@Override
public Iterator<Entry<K, V>> iterator() {
    lock.readLock().lock();
    try {
        throwIfClosed();
        // Snapshot the modification counter so the iterator can fail fast.
        long snapshot = modifications;
        return new LargeHashMapIterator<>(this, snapshot);
    } finally {
        lock.readLock().unlock();
    }
}
/**
 * Releases all off-heap allocations held by this map. The map must not
 * be used afterwards.
 */
@Override
public void close() {
    lock.writeLock().lock();
    try {
        throwIfClosed();
        clear(); // free the individual entries while the map is still open
        closed = true;
        UnsafeUtils.free(entryPointerAddresses); // then free the slot table itself
    } finally {
        lock.writeLock().unlock();
    }
}
/**
 * Returns the hash code value for this {@link LargeMap}: the sum over all
 * entries of {@code key.hashCode() ^ value.hashCode()}.
 *
 * @return the hash code value for this map
 */
@Override
public int hashCode() {
    int result = 0;
    for (Entry<K, V> pair : this) {
        result += pair.getKey().hashCode() ^ pair.getValue().hashCode();
    }
    return result;
}
/**
 * Compares the specified object with this map for equality.
 * Returns {@code true} if the given object is a map with the same
 * mappings as this map.
 *
 * <p>NOTE(review): the read lock is released before the entry-by-entry
 * comparison below, so a concurrent modification between the size check
 * and the iteration is possible; the fail-fast iterator would then throw.
 *
 * @param o object to be compared for equality with this map
 * @return {@code true} if the specified object is equal to this map
 */
@SuppressWarnings("unchecked")
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    LargeHashMap<K, V> that = (LargeHashMap<K, V>) o;
    lock.readLock().lock();
    try {
        // Cheap checks first, under this map's read lock.
        if (size != that.size) return false;
        if (closed != that.closed) return false;
    } finally {
        lock.readLock().unlock();
    }
    // Deep comparison: every entry of this map must be present in the other.
    for (Entry<K, V> entry : this) {
        V thisValue = entry.getValue();
        V thatValue = that.get(entry.getKey());
        if (thatValue == null || !thatValue.equals(thisValue)) return false;
    }
    return true;
}
/**
 * Returns a string representation of this map: all key-value mappings
 * (in no particular order) inside braces, one per line, indented by two
 * spaces, rendered as {@code key=value} and separated by commas.
 *
 * @return a string representation of this map
 */
@Override
public String toString() {
    StringBuilder out = new StringBuilder();
    out.append('{').append(System.lineSeparator());
    Iterator<Entry<K, V>> it = iterator();
    while (it.hasNext()) {
        out.append("  ").append(it.next());
        if (it.hasNext()) {
            out.append(",");
        }
        out.append(System.lineSeparator());
    }
    return out.append('}').toString();
}
/** Guards every public operation: rejects use of a map whose memory was already freed. */
private void throwIfClosed() {
    if (closed) {
        throw new IllegalStateException("Map was already closed");
    }
}
/**
 * Find the offset of a key in the map's underlying array
 *
 * <p>Linear probing: starts at the key's home slot and walks forward,
 * wrapping at capacity, until it finds either the key itself or an empty
 * slot. NOTE(review): this loop only terminates if the table is never
 * completely full — which holds as long as the load factor stays below 1.
 *
 * @param key The key for which to find the offset
 * @param capacity The current map capacity
 * @param entryPointerAddresses The address to the start of memory allocated for entry pointers
 * @return The offset of such key in the map (slot index times {@code Long.BYTES})
 */
private long findOffset(K key, long capacity, long entryPointerAddresses) {
    long offset = offset(key, capacity);
    long entryPointer = UnsafeUtils.getLong(entryPointerAddresses + offset * Long.BYTES);
    while (entryPointer != 0 && !key.equals(readKey(entryPointer))) {
        // Occupied by a different key: probe the next slot (wrapping around).
        offset = ++offset % capacity;
        entryPointer = UnsafeUtils.getLong(entryPointerAddresses + offset * Long.BYTES);
    }
    return offset * Long.BYTES;
}
/**
 * Resize the map's underlying slot table if required: doubles it when the
 * load exceeds the load factor, halves it when the load drops below half
 * the load factor (never below the default capacity). All existing entry
 * pointers are rehashed into the new table; the entries themselves are
 * not copied or reallocated.
 */
private void resizeIfRequired() {
    double load = size / (double) capacity;
    long newCapacity;
    if (load > loadFactor) {
        newCapacity = capacity * 2;
    } else if (load < loadFactor / 2 && capacity / 2 > DEFAULT_CAPACITY) {
        newCapacity = capacity / 2;
    } else {
        return;
    }
    modifications++;
    long newEntryPointerAddresses = UnsafeUtils.allocate(newCapacity * Long.BYTES);
    // Use a long counter: capacity is a long, so an int counter would overflow
    // (and i * Long.BYTES would overflow int arithmetic) on very large maps.
    for (long i = 0; i < capacity; i++) {
        long entryPointer = UnsafeUtils.getLong(entryPointerAddresses + i * Long.BYTES);
        if (entryPointer != 0) {
            // Re-probe each surviving entry against the new table size.
            K key = readKey(entryPointer);
            long offset = findOffset(key, newCapacity, newEntryPointerAddresses);
            UnsafeUtils.putLong(newEntryPointerAddresses + offset, entryPointer);
        }
    }
    UnsafeUtils.free(entryPointerAddresses);
    capacity = newCapacity;
    entryPointerAddresses = newEntryPointerAddresses;
}
/**
 * Read a key from the map given the entry address pointer
 *
 * <p>For variable-size keys the length is read from the int header at the
 * start of the entry; for fixed-size keys the serializer reports it.
 * NOTE(review): assumes {@code sizeInBytes(null)} is safe for fixed-size
 * serializers (i.e. they ignore the argument) — confirm against
 * {@code FixedSizeObjectSerializer}.
 *
 * @param entryPointer The entry address pointer
 * @return The entry's key
 */
private K readKey(long entryPointer) {
    int keySize = keyFixedSize ? keySerializer.sizeInBytes(null) : UnsafeUtils.getInt(entryPointer);
    MemoryReader reader = memoryReader.get().resetTo(entryPointer + keyHeaderSize, keySize);
    return keySerializer.deserialize(reader);
}
/**
 * Read a value from the map given the entry address pointer
 *
 * <p>Skips over the key header and key payload to locate the value region.
 * NOTE(review): assumes {@code sizeInBytes(null)} is safe for fixed-size
 * serializers (i.e. they ignore the argument) — confirm against
 * {@code FixedSizeObjectSerializer}.
 *
 * @param entryPointer The entry address pointer
 * @return The entry's value
 */
private V readValue(long entryPointer) {
    int keySize = keyFixedSize ? keySerializer.sizeInBytes(null) : UnsafeUtils.getInt(entryPointer);
    long valuePointer = entryPointer + keyHeaderSize + keySize;
    int valueSize = valueFixedSize ? valueSerializer.sizeInBytes(null) : UnsafeUtils.getInt(valuePointer);
    MemoryReader reader = memoryReader.get().resetTo(valuePointer + valueHeaderSize, valueSize);
    return valueSerializer.deserialize(reader);
}
/**
 * Materializes an immutable key/value pair from an entry's off-heap data.
 *
 * @param entryPointer The entry address pointer
 * @return The key and value pair
 */
private Entry<K, V> readEntry(long entryPointer) {
    K key = readKey(entryPointer);
    V value = readValue(entryPointer);
    return new SimpleImmutableEntry<>(key, value);
}
/**
 * Maps a key to its home slot index: spreads the key's hash code and
 * reduces it modulo the given capacity.
 *
 * @param key The key to hash
 * @param capacity The current capacity
 * @return The desired slot index
 */
private long offset(K key, long capacity) {
    long spreadHash = spread(key.hashCode());
    return spreadHash % capacity;
}
/**
 * Adapted from {@link java.util.concurrent.ConcurrentHashMap}: XORs the
 * high half of the hash into the low half so that hash sets differing
 * only in high bits still spread across slots, then clears the sign bit
 * so the result is always non-negative (safe for the modulo in
 * {@code offset}).
 *
 * @param hashCode The original hash code for the object
 * @return The spread, non-negative hash code
 */
private static int spread(int hashCode) {
    int mixed = hashCode ^ (hashCode >>> 16);
    return mixed & Integer.MAX_VALUE;
}
/**
 * LargeHashMapIterator, an inner class wrapping the iteration logic for the map.
 * Fail-fast: any structural modification of the map after the iterator was
 * created makes subsequent calls throw {@link ConcurrentModificationException}.
 *
 * @param <K> The key type
 * @param <V> The value type
 */
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
private static class LargeHashMapIterator<K, V> implements Iterator<Entry<K, V>> {
    /**
     * A reference to the map this iterator is iterating on
     */
    @NonNull
    private final LargeHashMap<K, V> map;
    /**
     * The number of modifications at the time we initialized this iterator,
     * this is to help fail fast if the map was changed midway
     */
    private final long expectedModifications;
    /**
     * The number of read items
     */
    private long read = 0;
    /**
     * The current byte offset into the entry pointer table
     */
    private long offset = 0;
    /**
     * Returns {@code true} if the iteration has more elements.
     * (In other words, returns {@code true} if {@link #next} would
     * return an element rather than throwing an exception.)
     *
     * @return {@code true} if the iteration has more elements
     * @throws ConcurrentModificationException if the map changed since the iterator was created
     */
    @Override
    public boolean hasNext() {
        map.lock.readLock().lock();
        try {
            if (expectedModifications == map.modifications) {
                return read < map.size;
            }
        } finally {
            map.lock.readLock().unlock();
        }
        throw new ConcurrentModificationException("Map has been modified since iterator was created");
    }
    /**
     * Returns the next element in the iteration.
     *
     * @return the next element in the iteration
     * @throws NoSuchElementException if the iteration has no more elements
     */
    @Override
    public Entry<K, V> next() {
        if (hasNext()) {
            map.lock.readLock().lock();
            try {
                // Scan forward over empty slots until the next occupied one.
                // hasNext() guarantees at least one unread entry remains, so
                // this scan terminates within the table bounds.
                long entryPointer = UnsafeUtils.getLong(map.entryPointerAddresses + offset);
                offset += Long.BYTES;
                while (entryPointer == 0) {
                    entryPointer = UnsafeUtils.getLong(map.entryPointerAddresses + offset);
                    offset += Long.BYTES;
                }
                read++;
                return map.readEntry(entryPointer);
            } finally {
                map.lock.readLock().unlock();
            }
        } else {
            // fixed garbled message: was "to for available items"
            throw new NoSuchElementException("Iterator exhausted, please use hasNext() to check for available items first");
        }
    }
}
}
|
package com.github.sbugat.nqueens;
/**
 * Base class for instrumented N-queens solvers: counts the elementary
 * operations performed by a solving run so implementations can be compared.
 */
public abstract class GenericInstrumentedNQueensSolver extends GenericNQueensSolver {

    /** Number of queen placements counter. */
    protected long queenPlacementsCount;
    /** Number of square reads counter. */
    protected long squareReadsCount;
    /** Number of square writes counter. */
    protected long squareWritesCount;
    /** Number of explicit tests done counter. */
    protected long explicitTestsCount;
    /** Number of implicit tests done counter (loop). */
    protected long implicitTestsCount;
    /** Number of method calls counter. */
    protected long methodCallsCount;

    /**
     * @param chessboardSizeArg size of the chessboard (and number of queens)
     * @param printSolutionArg whether found solutions should be printed
     */
    protected GenericInstrumentedNQueensSolver(final int chessboardSizeArg, final boolean printSolutionArg) {
        super(chessboardSizeArg, printSolutionArg);
    }

    /** Resets the base solver state and zeroes every instrumentation counter. */
    @Override
    public void reset() {
        super.reset();
        // Reinitialize generic instrumentations
        queenPlacementsCount = 0;
        squareReadsCount = 0;
        squareWritesCount = 0;
        explicitTestsCount = 0;
        implicitTestsCount = 0;
        methodCallsCount = 0;
    }

    /** Renders all counters, one per line, with blank lines between logical groups. */
    @Override
    public String toString() {
        final StringBuilder stringBuilder = new StringBuilder();
        stringBuilder.append("queen placements count:");
        stringBuilder.append(queenPlacementsCount);
        stringBuilder.append(System.lineSeparator());
        stringBuilder.append("square reads count:");
        stringBuilder.append(squareReadsCount);
        stringBuilder.append(System.lineSeparator());
        // fixed typo: label used to read "qquare writes count:"
        stringBuilder.append("square writes count:");
        stringBuilder.append(squareWritesCount);
        stringBuilder.append(System.lineSeparator());
        stringBuilder.append(System.lineSeparator());
        stringBuilder.append("explicit tests count:");
        stringBuilder.append(explicitTestsCount);
        stringBuilder.append(System.lineSeparator());
        stringBuilder.append("implicit tests count:");
        stringBuilder.append(implicitTestsCount);
        stringBuilder.append(System.lineSeparator());
        stringBuilder.append("total tests count:");
        stringBuilder.append(explicitTestsCount + implicitTestsCount);
        stringBuilder.append(System.lineSeparator());
        stringBuilder.append(System.lineSeparator());
        stringBuilder.append("method calls count:");
        stringBuilder.append(methodCallsCount);
        return stringBuilder.toString();
    }
}
|
package com.kryptnostic.rhizome.hazelcast.serializers;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.collect.Maps;
import com.google.common.io.Resources;
import com.google.common.primitives.Ints;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.kryptnostic.rhizome.hazelcast.objects.OrderedUUIDSet;
import com.kryptnostic.rhizome.hazelcast.objects.UUIDSet;
/**
 * Grab-bag of Hazelcast/serialization helpers grouped into nested namespaces:
 * {@link Serializers}, {@link Sets}, and {@link Streams}.
 */
public class RhizomeUtils {
    static final Logger logger = LoggerFactory.getLogger( RhizomeUtils.class );

    /** Helpers for (de)serializing enum sets and optionals over Hazelcast streams. */
    public static class Serializers {
        /**
         * Writes one boolean per enum constant (declaration order) indicating
         * membership in the given set. The wire format therefore depends on the
         * enum's constant order staying stable between writer and reader.
         */
        public static <T extends Enum<T>> void serializeEnumSet( ObjectDataOutput out, Class<T> clazz, Set<T> set )
                throws IOException {
            T[] enumConstants = clazz.getEnumConstants();
            if ( enumConstants == null ) {
                throw new IllegalArgumentException( "This method may only be called for Enum classes" );
            }
            for ( T t : enumConstants ) {
                out.writeBoolean( set.contains( t ) );
            }
        }
        /**
         * Inverse of {@link #serializeEnumSet}: reads one boolean per enum
         * constant in declaration order and rebuilds the set.
         */
        public static <T extends Enum<T>> EnumSet<T> deSerializeEnumSet( ObjectDataInput in, Class<T> clazz )
                throws IOException {
            T[] enumConstants = clazz.getEnumConstants();
            if ( enumConstants == null ) {
                throw new IllegalArgumentException( "This method may only be called for Enum classes" );
            }
            EnumSet<T> elements = EnumSet.<T> noneOf( clazz );
            for ( T t : enumConstants ) {
                if ( in.readBoolean() ) {
                    elements.add( t );
                }
            }
            return elements;
        }
        /**
         * Writes a presence flag followed by the value (via the supplied
         * serializer) only when the optional is present.
         */
        public static <T> void serializeOptional(
                ObjectDataOutput out,
                Optional<T> object,
                IoPerformingBiConsumer<ObjectDataOutput, T> serializer ) throws IOException {
            out.writeBoolean( object.isPresent() );
            if ( object.isPresent() ) {
                T value = object.get();
                serializer.accept( out, value );
            }
        }
        /**
         * Inverse of {@link #serializeOptional}: reads the presence flag and,
         * if set, deserializes the value.
         */
        public static <T> Optional<T> deserializeToOptional(
                ObjectDataInput in,
                IoPerformingFunction<ObjectDataInput, T> deserializer ) throws IOException {
            Optional<T> object = Optional.absent();
            if ( in.readBoolean() ) {
                object = Optional.of( deserializer.apply( in ) );
            }
            return object;
        }
    }

    /** Factory helpers that presize hash-based sets for an expected element count. */
    public static class Sets {
        public static OrderedUUIDSet newOrderedUUIDSetWithExpectedSize( int expected ) {
            return new OrderedUUIDSet( expectedSize( expected ) );
        }
        public static UUIDSet newUUIDSetWithExpectedSize( int expected ) {
            return new UUIDSet( expectedSize( expected ) );
        }
        /**
         * Computes an initial capacity that avoids rehashing for the expected
         * size (mirrors Guava's capacity heuristic: +1 for tiny sizes, +33%
         * headroom otherwise, capped at Integer.MAX_VALUE).
         */
        public static int expectedSize( int expectedSize ) {
            if ( expectedSize < 0 ) {
                throw new IllegalArgumentException( "expectedSize cannot be negative but was: " + expectedSize );
            }
            if ( expectedSize < 3 ) {
                return expectedSize + 1;
            }
            if ( expectedSize < Ints.MAX_POWER_OF_TWO ) {
                return expectedSize + expectedSize / 3;
            }
            return Integer.MAX_VALUE;
        }
    }

    /** Raw stream read/write helpers for maps, string arrays, and byte arrays. */
    public static class Streams {
        /**
         * Writes a String-to-String map as two parallel string arrays.
         * NOTE(review): relies on keySet() and values() iterating in the same
         * order, which holds for the common Map implementations but is not
         * guaranteed by the Map contract — verify for exotic map types.
         */
        public static void writeStringStringMap( ObjectDataOutput out, Map<String, String> object )
                throws IOException {
            int size = object.size();
            Set<String> keys = object.keySet();
            Collection<String> vals = object.values();
            RhizomeUtils.Streams.writeStringArray( out, keys.toArray( new String[ size ] ) );
            RhizomeUtils.Streams.writeStringArray( out, vals.toArray( new String[ size ] ) );
        }
        /** Inverse of {@link #writeStringStringMap}: zips the two arrays back into a map. */
        public static Map<String, String> readStringStringMap( ObjectDataInput in ) throws IOException {
            String[] keys = RhizomeUtils.Streams.readStringArray( in );
            String[] vals = RhizomeUtils.Streams.readStringArray( in );
            Map<String, String> map = Maps.newHashMapWithExpectedSize( keys.length );
            for ( int i = 0; i < keys.length; i++ ) {
                map.put( keys[ i ], vals[ i ] );
            }
            return map;
        }
        /** Writes a length prefix followed by each string in modified-UTF-8. */
        public static void writeStringArray( ObjectDataOutput out, String[] strings ) throws IOException {
            out.writeInt( strings.length );
            for ( String string : strings ) {
                out.writeUTF( string );
            }
        }
        /** Inverse of {@link #writeStringArray}. */
        public static String[] readStringArray( ObjectDataInput in ) throws IOException {
            int size = in.readInt();
            String[] strings = new String[ size ];
            for ( int i = 0; i < size; i++ ) {
                strings[ i ] = in.readUTF();
            }
            return strings;
        }
        /** Writes a length prefix followed by the raw bytes. */
        public static void writeByteArray( ObjectOutput out, byte[] bytes ) throws IOException {
            out.writeInt( bytes.length );
            out.write( bytes, 0, bytes.length );
        }
        /** Inverse of {@link #writeByteArray}. */
        public static byte[] readByteArray( ObjectInput in ) throws IOException {
            int size = in.readInt();
            byte[] result = new byte[ size ];
            in.readFully( result );
            return result;
        }
        /**
         * Loads a classpath resource as a UTF-8 string; returns {@code null}
         * (after logging) when the resource is missing or unreadable.
         */
        public static String loadResourceToString( final String path ) {
            try {
                URL resource = Resources.getResource( path );
                return Resources.toString( resource, StandardCharsets.UTF_8 );
            } catch ( IOException | IllegalArgumentException e ) {
                logger.error( "Failed to load resource from " + path, e );
                return null;
            }
        }
    }
}
|
package com.nextcloud.android.sso.exceptions;
import android.content.Context;
import com.nextcloud.android.sso.model.ExceptionMessage;
/**
 * Fallback SSO exception used when no more specific error type applies.
 * An optional raw message can be attached; otherwise the generic message
 * is resolved from resources by the superclass.
 */
public class UnknownErrorException extends SSOException {

    public UnknownErrorException() {
        super();
    }

    public UnknownErrorException(String message) {
        super();
        // Attach the raw message directly; no resource lookup needed.
        em = new ExceptionMessage("", message);
    }

    @Override
    public void loadExceptionMessage(Context context) {
        // Only resolve the generic localized message when none was supplied.
        if (em == null) {
            super.loadExceptionMessage(context);
        }
    }
}
|
package com.persado.oss.quality.stevia.selenium.core;
import com.persado.oss.quality.stevia.annotations.AnnotationsHelper;
import com.persado.oss.quality.stevia.selenium.core.controllers.WebDriverWebController;
import com.persado.oss.quality.stevia.testng.Verify;
import org.openqa.selenium.WebDriverException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * The Class SteviaContext.
 *
 * <p>Holds per-thread test state (web controller, verify helper, registered
 * parameters, timeout settings, Spring context) in a {@link ThreadLocal} so
 * parallel test threads do not interfere with each other.
 */
public class SteviaContext {

    /**
     * The Constant STEVIA_CONTEXT_LOG.
     */
    private static final Logger LOG = LoggerFactory.getLogger(SteviaContext.class);

    /** Sequence used to give each controller thread a short readable name suffix. */
    private static final AtomicInteger threadSeq = new AtomicInteger((int) (System.currentTimeMillis() % 0xcafe));

    /**
     * The inner Class Context.
     */
    static class Context {

        /**
         * The controller.
         */
        private WebController controller;

        /**
         * The verify.
         */
        private Verify verify;

        /**
         * The is web driver.
         */
        private boolean isWebDriver;

        /**
         * The params registry.
         */
        private Map<String, String> paramsRegistry;

        // Timeout settings (seconds unless noted; waitForAjaxComplete is milliseconds).
        private int waitForPageToLoad = 120;
        private int waitForAjaxComplete = 120000;
        private int waitForElement = 10;
        private int waitForWindow = 10;
        private int waitForElementInvisibility = 1;

        private ApplicationContext context;
        private TestState state;

        /**
         * Clear context: quits the controller (ignoring WebDriver shutdown
         * failures) and resets every field for reuse by the next test.
         */
        public void clear() {
            if (controller != null) {
                try {
                    controller.quit();
                } catch (WebDriverException wde) {
                    LOG.warn("Exception caught calling controller.quit(): \"" + wde.getMessage() + "\" additional info: " + wde.getAdditionalInformation());
                }
            }
            controller = null;
            verify = null;
            isWebDriver = false;
            if (paramsRegistry != null) {
                paramsRegistry.clear();
            }
            context = null;
            state = null;
            Thread.currentThread().setName("Stevia - Inactive");
            LOG.info("Context closed, controller shutdown");
            AnnotationsHelper.disposeControllers();
        }

        public int getWaitForPageToLoad() {
            return waitForPageToLoad;
        }

        public void setWaitForPageToLoad(int waitForPageToLoad) {
            this.waitForPageToLoad = waitForPageToLoad;
        }

        public int getWaitForElement() {
            return waitForElement;
        }

        public void setWaitForElement(int waitForElement) {
            this.waitForElement = waitForElement;
        }

        public int getWaitForAjaxComplete() {
            return waitForAjaxComplete;
        }

        public void setWaitForAjaxComplete(int waitForAjaxComplete) {
            this.waitForAjaxComplete = waitForAjaxComplete;
        }

        public int getWaitForElementInvisibility() {
            return waitForElementInvisibility;
        }

        public void setWaitForElementInvisibility(int waitForElementInvisibility) {
            this.waitForElementInvisibility = waitForElementInvisibility;
        }

        public int getWaitForWindow() {
            return waitForWindow;
        }

        public void setWaitForWindow(int waitForWindow) {
            this.waitForWindow = waitForWindow;
        }

        public ApplicationContext getContext() {
            return context;
        }

        public void setContext(ApplicationContext context) {
            this.context = context;
        }
    }

    /**
     * The inner context as a thread local variable.
     * Declared final: it must never be reassigned — it is shared by all threads.
     */
    private static final ThreadLocal<Context> innerContext = new ThreadLocal<SteviaContext.Context>() {
        @Override
        protected Context initialValue() {
            return new Context(); //initial is empty;
        }
    };

    /**
     * Gets the web controller.
     *
     * @return the web controller
     */
    public static WebController getWebController() {
        return innerContext.get().controller;
    }

    /**
     * Determines the instance of the Web Controller
     *
     * @return true, if it is instance of WebDriverWebController false if it is instance of SeleniumWebController
     */
    public static boolean isWebDriver() {
        return innerContext.get().isWebDriver;
    }

    /**
     * Adds parameters to registry; if a parameter exists already it will be overwritten.
     *
     * @param params a type of SteviaContextParameters
     */
    public static void registerParameters(SteviaContextParameters params) {
        Map<String, String> paramsRegistry = innerContext.get().paramsRegistry;
        if (paramsRegistry == null) {
            innerContext.get().paramsRegistry = new HashMap<String, String>();
        }
        innerContext.get().paramsRegistry.putAll(params.getAllParameters());
        LOG.warn("Thread {} just registered {}", new Object[]{Thread.currentThread().getName(), params.getAllParameters()});
    }

    /**
     * get a parameter from the registry.
     * NOTE(review): throws NPE if called before {@link #registerParameters}.
     *
     * @param paramName the param name
     * @return the parameter value
     */
    public static String getParam(String paramName) {
        return innerContext.get().paramsRegistry.get(paramName);
    }

    /**
     * Gets the params.
     *
     * @return a Map of the registered parameters
     */
    public static Map<String, String> getParams() {
        return innerContext.get().paramsRegistry;
    }

    /**
     * get the test state that is relevant to the running thread for this test script
     *
     * @return <T extends TestState> T an object that implements TestState
     */
    @SuppressWarnings("unchecked")
    public static <T extends TestState> T getTestState() {
        return (T) innerContext.get().state;
    }

    /**
     * set the test state at any given time for the running thread.
     *
     * @param state an object implementing the marker interface
     */
    public static <T extends TestState> void setTestState(T state) {
        innerContext.get().state = state;
    }

    /**
     * Register the controller in the context of current thread's copy for this thread-local variable.
     *
     * @param instance the new web controller
     */
    public static void setWebController(WebController instance) {
        Context context = innerContext.get();
        context.controller = instance;
        if (instance instanceof WebDriverWebController) {
            context.isWebDriver = true;
            LOG.warn("Handle is : " + ((WebDriverWebController) instance).getDriver().getWindowHandle());
        } else {
            context.isWebDriver = false;
        }
        Thread.currentThread().setName(
                "Stevia [" + (context.isWebDriver ? "WD" : "RC") + " "
                        + instance.getClass().getSimpleName() + "@"
                        + Integer.toHexString(threadSeq.incrementAndGet()) + "]");
        LOG.info("Context ready, controller is now set, type is {}", context.isWebDriver ? "WebDriver" : "SeleniumRC");
    }

    /**
     * return a verify helper initialized with the right controller.
     *
     * @return the verify
     */
    public static Verify verify() {
        Context context = innerContext.get();
        if (context.verify == null) {
            context.verify = new Verify();
        }
        return context.verify;
    }

    /**
     * Clean the local thread context
     */
    public static void clean() {
        innerContext.get().clear();
        innerContext.remove();
        // FIX: the ThreadLocal field itself is intentionally NOT nulled out here.
        // The previous "innerContext = null" assignment broke every OTHER thread
        // still using SteviaContext (the field is shared across threads) and made
        // any later call on this thread throw NPE; remove() already discards this
        // thread's copy.
        System.gc();
    }

    public static int getWaitForPageToLoad() {
        return innerContext.get().getWaitForPageToLoad();
    }

    public static void setWaitForPageToLoad(int waitForPageToLoad) {
        innerContext.get().setWaitForPageToLoad(waitForPageToLoad);
    }

    public static int getWaitForElement() {
        return innerContext.get().getWaitForElement();
    }

    public static int getWaitForAjaxComplete() {
        return innerContext.get().getWaitForAjaxComplete();
    }

    public static void setWaitForAjaxComplete(int waitForAjaxComplete) {
        innerContext.get().setWaitForAjaxComplete(waitForAjaxComplete);
    }

    public static void setWaitForElement(int waitForElement) {
        innerContext.get().setWaitForElement(waitForElement);
    }

    public static int getWaitForElementInvisibility() {
        return innerContext.get().getWaitForElementInvisibility();
    }

    public static void setWaitForElementInvisibility(int waitForElementInvisibility) {
        innerContext.get().setWaitForElementInvisibility(waitForElementInvisibility);
    }

    public static void setWaitForNewWindow(int waitForNewWindow) {
        innerContext.get().setWaitForWindow(waitForNewWindow);
    }

    public static int getWaitForNewWindow() {
        return innerContext.get().getWaitForWindow();
    }

    public static void attachSpringContext(ApplicationContext applicationContext) {
        innerContext.get().setContext(applicationContext);
    }

    public static ApplicationContext getSpringContext() {
        return innerContext.get().getContext();
    }
}
|
package com.tealcube.minecraft.bukkit.facecore.ui;
import com.google.common.base.Preconditions;
import com.tealcube.minecraft.bukkit.TextUtils;
import com.tealcube.minecraft.bukkit.mirror.ClassType;
import com.tealcube.minecraft.bukkit.mirror.Mirror;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import java.lang.reflect.Method;
/**
 * Utility for sending action-bar messages to players via reflection into
 * the Minecraft server's chat packet classes.
 */
public final class ActionBarMessage {

    private static final Class<?> CHAT_COMPONENT_TEXT;
    private static final Class<?> CHAT_BASE_COMPONENT;
    private static final Class<?> PACKET;
    private static final Class<?> PLAY_OUT_CHAT_PACKET;

    static {
        // Resolved once; any of these may be null on an unsupported server version,
        // which the Preconditions checks in send() surface.
        CHAT_COMPONENT_TEXT = Mirror.getClass("ChatComponentText", ClassType.MINECRAFT_SERVER);
        CHAT_BASE_COMPONENT = Mirror.getClass("IChatBaseComponent", ClassType.MINECRAFT_SERVER);
        PACKET = Mirror.getClass("Packet", ClassType.MINECRAFT_SERVER);
        PLAY_OUT_CHAT_PACKET = Mirror.getClass("PacketPlayOutChat", ClassType.MINECRAFT_SERVER);
    }

    private ActionBarMessage() {
        // utility class, not instantiable
    }

    /** Sends the message to every player in the iterable. */
    public static void send(Iterable<Player> players, String message) {
        for (Player player : players) {
            send(player, message);
        }
    }

    /**
     * Sends a single colored action-bar message to one player by building a
     * PacketPlayOutChat with chat position byte 2 (action bar) and pushing it
     * through the player's connection.
     */
    public static void send(Player player, String message) {
        Preconditions.checkNotNull(CHAT_COMPONENT_TEXT);
        Preconditions.checkNotNull(CHAT_BASE_COMPONENT);
        Preconditions.checkNotNull(PACKET);
        Preconditions.checkNotNull(PLAY_OUT_CHAT_PACKET);
        try {
            Object chatComponentText = CHAT_COMPONENT_TEXT.getConstructor(String.class).
                    newInstance(TextUtils.color(message));
            Object packet = PLAY_OUT_CHAT_PACKET.getConstructor(CHAT_BASE_COMPONENT, Byte.TYPE).
                    newInstance(chatComponentText, (byte) 2);
            Object handle = Mirror.getMethod(player.getClass(), "getHandle").invoke(player);
            Object connection = Mirror.getField(handle.getClass(), "playerConnection").get(handle);
            Method sendPacket = Mirror.getMethod(connection.getClass(), "sendPacket", PACKET);
            sendPacket.invoke(connection, packet);
        } catch (Exception e) {
            // FIX: log the whole exception, not just getMessage() — reflection
            // failures frequently have a null message, which logged "null" with
            // no indication of what went wrong.
            Bukkit.getLogger().severe("Failed to send action bar message: " + e);
        }
    }
}
|
package com.ziffit.autoconfigure.spring.security;
/** Shared constants for the Spring Security / JWT integration. */
public class SecurityConstants {

    /** HTTP header carrying the bearer token. */
    public static final String AUTHORIZATION_HEADER = "Authorization";
    /** Granted authority for unauthenticated callers. */
    public static final String ROLE_ANONYMOUS = "ROLE_ANONYMOUS";
    /** Granted authority for regular authenticated users. */
    public static final String ROLE_USER = "ROLE_USER";
    /** JWT claim name under which authorities are stored. */
    public static final String JWT_AUTHORITIES_KEY = "authorities";

    private SecurityConstants() {
        // constants holder, not instantiable
    }
}
|
package cz.cvut.fel.a4m36jee.airlines.view;
import cz.cvut.fel.a4m36jee.airlines.model.Flight;
import cz.cvut.fel.a4m36jee.airlines.model.Reservation;
import cz.cvut.fel.a4m36jee.airlines.service.FlightService;
import cz.cvut.fel.a4m36jee.airlines.service.ReservationService;
import javax.enterprise.context.RequestScoped;
import javax.faces.bean.ManagedBean;
import javax.faces.context.FacesContext;
import javax.inject.Inject;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.WebApplicationException;
import java.io.IOException;
import java.util.Date;
import java.util.List;
import java.util.logging.Logger;
/**
 * View resources for reservation.
 *
 * <p>JSF-facing facade over {@link ReservationService}: on any service
 * failure the user is redirected to the error page.
 *
 * @author slavion3
 */
@RequestScoped
@ManagedBean(name = "reservationViewResource")
public class ReservationViewResource {

    /**
     * Logger.
     */
    @Inject
    private Logger logger;

    /**
     * Service for reservations.
     */
    @Inject
    private ReservationService reservationService;

    @Inject
    private FlightService flightService;

    /**
     * Find all reservations.
     * @return List of reservations
     * @throws IOException if redirect is unsuccessful
     */
    public List<Reservation> getAllReservations() throws IOException {
        HttpServletResponse response = (HttpServletResponse) FacesContext.getCurrentInstance().getExternalContext().getResponse();
        try {
            List<Reservation> reservations = reservationService.list();
            logger.info("Reservation list received.");
            return reservations;
        } catch (Exception e) { //TODO exception
            logger.severe( "Error during receive reservation list!");
            response.sendRedirect("/airlines/error/");
            // FIX: preserve the original cause instead of discarding it.
            throw new WebApplicationException(e);
        }
    }

    /**
     * Find reservations for flight.
     * @param flightId flight ID
     * @return List of reservations
     * @throws IOException if redirect is unsuccessful
     */
    public List<Reservation> getFlightReservation(final long flightId) throws IOException {
        HttpServletResponse response = (HttpServletResponse) FacesContext.getCurrentInstance().getExternalContext().getResponse();
        try {
            List<Reservation> reservations = reservationService.listByFlightId(flightId); //TODO correct attribute
            logger.info("Reservation list for flight with id " + flightId + " received.");
            return reservations;
        } catch (Exception e) { //TODO exception
            logger.severe( "Error during receive reservation list for flight with id " + flightId + "!");
            response.sendRedirect("/airlines/error/");
            // FIX: preserve the original cause instead of discarding it.
            throw new WebApplicationException(e);
        }
    }

    /**
     * Find reservation.
     * @param id reservation ID
     * @return reservation
     * @throws IOException if redirect is unsuccessful
     */
    public Reservation getReservation(final long id) throws IOException {
        HttpServletResponse response = (HttpServletResponse) FacesContext.getCurrentInstance().getExternalContext().getResponse();
        try {
            Reservation reservation = reservationService.get(id);
            logger.info("Reservation with id " + id +" found.");
            return reservation;
        } catch (Exception e) { //TODO exception
            logger.severe( "Error during find reservation with id " + id +"!");
            response.sendRedirect("/airlines/error/");
            // FIX: preserve the original cause instead of discarding it.
            throw new WebApplicationException(e);
        }
    }

    /**
     * Create new reservation.
     * @param reservation new reservation
     * @param flight flight the reservation belongs to
     * @throws IOException if redirect is unsuccessful
     */
    public void createReservation(final Reservation reservation, final Flight flight) throws IOException {
        HttpServletResponse response = (HttpServletResponse) FacesContext.getCurrentInstance().getExternalContext().getResponse();
        reservation.setFlight(flight);
        reservation.setCreated(new Date());
        try {
            reservationService.create(reservation);
            logger.info("New reservation created.");
            response.sendRedirect("/airlines/flight/reservation/?id="+flight.getId());
        } catch (Exception e) { //TODO exception
            logger.severe( "Error during create reservation!");
            response.sendRedirect("/airlines/error/");
        }
    }

    /**
     * Delete reservation.
     * @param id reservation ID
     * @param flightId flight ID used for the post-delete redirect
     * @param password password authorizing the deletion
     * @throws IOException if redirect is unsuccessful
     */
    public void deleteReservation(final long id, final long flightId, final String password) throws IOException {
        HttpServletResponse response = (HttpServletResponse) FacesContext.getCurrentInstance().getExternalContext().getResponse();
        try {
            reservationService.delete(id, password);
            logger.info("Reservation with id " + id + " deleted.");
            response.sendRedirect("/airlines/flight/reservation/?id="+flightId);
        } catch (Exception e) { //TODO exception
            logger.severe( "Error during delete reservation with id " + id + "!");
            response.sendRedirect("/airlines/error/");
        }
    }
}
|
package de.frosner.datagenerator.gui.main;
import javax.swing.SwingWorker;
import de.frosner.datagenerator.export.CsvExportConfiguration;
import de.frosner.datagenerator.generator.DataGeneratorService;
/**
* {@link SwingWorker} accessing the {@link DataGeneratorService} in a separate thread. It also toggles the generate and
* abort buttons.
*/
public class GenerateDataButtonWorker extends SwingWorker<Void, Void> {

    // Immutable job parameters captured at construction time.
    private final int _numberOfInstances;
    private final CsvExportConfiguration _config;

    public GenerateDataButtonWorker(int numberOfInstances, CsvExportConfiguration config) {
        _numberOfInstances = numberOfInstances;
        _config = config;
    }

    @Override
    protected Void doInBackground() {
        // Disable "generate" / enable "abort" for the duration of the run.
        toggleButtonsForGeneration(true);
        DataGeneratorService.INSTANCE.generateData(_numberOfInstances, _config);
        return null;
    }

    @Override
    protected void done() {
        // Restore the buttons and let the GUI drop its reference to this worker.
        toggleButtonsForGeneration(false);
        SwingLauncher.GUI.detachGenerateDataButtonWorker();
    }

    // Single place that flips both buttons consistently: while generating, the
    // generate button is off and the abort button is on (and vice versa).
    private static void toggleButtonsForGeneration(boolean generating) {
        SwingLauncher.GUI.enableGenerateDataButton(!generating);
        SwingLauncher.GUI.enableAbortDataGenerationButton(generating);
    }
}
|
package middleman;
import static org.hamcrest.CoreMatchers.*;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.notNullValue;
import middleman.proxy.DummyHttpServer;
import middleman.proxy.DummyHttpServer.ReceivedRequest;
import middleman.utils.Block;
import org.junit.BeforeClass;
import org.junit.Test;
public class RequestModificationFunctionalTests {

    static TestContext testContext;

    @BeforeClass
    public static void setupEnvironment() {
        testContext = TestContext.using(TestAsset.middlemanPassingThrough().with(TestAsset.firefoxWithMiddlemanProxy()));
    }

    @Test
    public void shouldSetRequestHeaderIfSpecifiedThroughApi() {
        testContext.verify(new Block<TestContext>() {
            public void yield(TestContext ctx) {
                // Tell the proxy to inject a custom header into every forwarded request.
                TestAsset.middlemanInstance().api().addRequestModifier().addingHeader("HeaderAbc", "xyz");
                // Backend the proxy forwards to; records every request it receives.
                DummyHttpServer proxiedServer = new DummyHttpServer(8082).start();
                String proxiedPort = "8082";
                TestAsset.requestFromProxy("http://localhost:" + proxiedPort + "/");
                ReceivedRequest firstReceivedRequest = proxiedServer.receivedRequests().get(0);
                System.out.print(firstReceivedRequest);
                System.out.print(firstReceivedRequest.getUrl());
                System.out.print(firstReceivedRequest.getHeaders().getFirst("HeaderAbc"));
                assertThat(firstReceivedRequest, is(notNullValue()));
                assertThat(firstReceivedRequest.getUrl(), containsString(proxiedPort));
                // The injected header must have reached the backend server.
                assertThat(firstReceivedRequest.getHeaders().getFirst("HeaderAbc"), is("xyz"));
            }
        });
    }

    /**
     * Sleeps for the given number of milliseconds.
     * Fixed: the previous version swallowed {@link InterruptedException} with
     * printStackTrace; an interrupted test thread must re-assert its interrupt
     * status so callers can observe the interruption.
     */
    private static void sleep(int milliseconds) {
        try {
            Thread.sleep(milliseconds);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}
|
package eu.musesproject.server.knowledgecompiler;
import java.util.List;
import org.apache.log4j.Logger;
import eu.musesproject.server.dataminer.DataMiner;
import eu.musesproject.server.db.handler.DBManager;
import eu.musesproject.server.entity.SecurityRules;
import eu.musesproject.server.knowledgerefinementsystem.model.Message;
import eu.musesproject.server.knowledgerefinementsystem.model.Pattern;
import eu.musesproject.server.scheduler.ModuleType;
/**
* Class KnowledgeCompiler
*
* @author Sergio Zamarripa (S2)
* @version Oct 7, 2013
*/
public class KnowledgeCompiler {

    private static DBManager dbManager = new DBManager(ModuleType.KRS);

    // Fixed: was Logger.getLogger(DataMiner.class) — a copy-paste slip that
    // mis-attributed every log record from this class to the data miner.
    private Logger logger = Logger.getLogger(KnowledgeCompiler.class);

    /**
     * Info DM
     *
     * This method starts the process of compilation of new rules with the new mined patterns (changes of environment), based on
     * current rules.
     *
     * @param void (it retrieves previously mined patterns, already notified during the mining process)
     *
     * @return void
     */
    public void compileNewRules() {
        // TODO: compilation not implemented yet — the validated rules are
        // fetched but not processed further.
        List<SecurityRules> dmRules = dbManager.getSecurityRulesByStatus("VALIDATED");
    }

    /**
     * Info DM
     *
     * This method is used by the data miner to notify the finding of a new pattern associated to a clue pattern (sequence of events
     * associated to a security incident)
     *
     * @param pattern
     *
     * @return void
     */
    public void notifyPattern(Pattern pattern) {
        // TODO: not implemented yet.
    }

    /**
     * Info KN
     *
     * This method is used by the knowledge compiler to log the processing results and other feedback to be available for the CSO
     *
     * @param message
     *
     * @return void
     */
    public void logProcessingResult(Message message) {
        // TODO: not implemented yet.
    }
}
|
package pt.fccn.arquivo.pages;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;
import org.openqa.selenium.WebElement;
/**
* Represents the page with the terms and conditions.
* @author Simao Fontes
*
*/
public class TermsAndConditionsPage {

    private final WebDriver driver;

    // private static final String urlName = "terms-conditions.jsp";
    /** Expected (partial) page titles in each language. */
    private static final String titleTextPT = "Termos e Condições";
    private static final String titleTextEN = "Terms and Conditions";
    /** Link texts used to switch language. */
    private static final String linkTextPT = "Português";
    private static final String linkTextEN = "English";
    private static final String contentID = "conteudo-termos";

    /**
     * Constructs a Terms and conditions page object and verifies the Portuguese
     * title is present (waiting up to 25 seconds).
     * @param driver <code>org.openqa.selenium.WebDriver</code> use for this page
     */
    public TermsAndConditionsPage(WebDriver driver) {
        this.driver = driver;
        final boolean onTermsPage = new WebDriverWait(driver, 25)
                .until(ExpectedConditions.titleContains(titleTextPT));
        if (!onTermsPage) {
            System.out.println("Obtained title: " + driver.getTitle());
            throw new IllegalStateException("This is not the terms and conditions page\n"
                    + driver.getCurrentUrl() + this.getClass().getName());
        }
    }

    /**
     * Change to the English version of the page.
     * @return true when the English page was reached
     */
    public boolean toEnglishVersion() {
        // Wait for the language link, then follow it.
        final WebElement englishLink = new WebDriverWait(driver, 25)
                .until(ExpectedConditions.presenceOfElementLocated(By.linkText(linkTextEN)));
        englishLink.click();
        // Confirm the English title shows up within 25 seconds.
        final boolean titleOk = new WebDriverWait(driver, 25)
                .until(ExpectedConditions.titleContains(titleTextEN));
        if (!titleOk) {
            System.out.println("Expected: " + titleTextEN);
            System.out.println("Found: " + driver.getTitle());
            throw new IllegalStateException("This is not the terms and conditions page, in English\nTitle received is "
                    + driver.getTitle() + " " + this.getClass().getName());
            //return false;
        }
        return true;
    }

    /**
     * Change to the Portuguese version of the page.
     * @return true when the Portuguese page was reached
     */
    public boolean toPortugueseVersion() {
        driver.findElement(By.linkText(linkTextPT)).click();
        try {
            Thread.sleep(5000); // wait for page to load
        } catch (InterruptedException ex) {
            Thread.currentThread().interrupt();
        }
        final String titleFound = driver.getTitle();
        // Check that we're on the right page.
        if (titleFound.contains(titleTextPT)) {
            return true;
        }
        System.out.println("Expected: " + titleTextPT);
        System.out.println("Found: " + titleFound);
        // Alternatively, we could navigate to the login page, perhaps logging out first
        return false;
    }
}
|
package info.u_team.u_team_core.gui.renderer;
import java.util.function.Supplier;
import com.mojang.blaze3d.matrix.MatrixStack;
import info.u_team.u_team_core.util.RenderUtil;
import info.u_team.u_team_core.util.RenderUtil.Matrix4fExtended;
import net.minecraft.client.*;
import net.minecraft.client.gui.FontRenderer;
import net.minecraft.util.math.MathHelper;
import net.minecraft.util.math.vector.Vector4f;
/**
 * A {@link ScalingTextRenderer} that scrolls its text horizontally (marquee
 * style) whenever the rendered string is wider than the configured width.
 * The animation is a small state machine: the text waits at one end
 * ({@link State#WAITING}), then moves one step at a time towards the other
 * end, where it waits again before scrolling back.
 */
public class ScrollingTextRenderer extends ScalingTextRenderer {

	/** Visible window width (GUI units); scrolling starts when the text is wider. */
	protected int width;
	/** Distance the text moves per scroll step. */
	protected float stepSize;
	/** Milliseconds between two scroll steps while moving. */
	protected int speedTime;
	/** Milliseconds to pause at either end before the next scroll pass. */
	protected int waitTime;
	/** Current horizontal offset; 0 at the start position, negative while scrolled left. */
	protected float moveDifference = 0;
	/** Timestamp (ms) the current wait/step timer was armed at; 0 means "not armed yet". */
	protected long lastTime = 0;
	/** Current animation state. */
	protected State state = State.WAITING;

	/**
	 * Creates a scrolling text renderer with default animation settings
	 * (width 100, step 1, 20 ms per step, 4 s end-of-line pause).
	 *
	 * @param fontRenderer font used for measuring and drawing
	 * @param textSupplier queried every frame for the current text
	 * @param x left edge of the text window
	 * @param y top edge of the text window
	 */
	public ScrollingTextRenderer(FontRenderer fontRenderer, Supplier<String> textSupplier, float x, float y) {
		super(fontRenderer, textSupplier, x, y);
		width = 100;
		stepSize = 1;
		speedTime = 20;
		waitTime = 4000;
	}

	public int getWidth() {
		return width;
	}

	public void setWidth(int width) {
		this.width = width;
	}

	public float getStepSize() {
		return stepSize;
	}

	public void setStepSize(float stepSize) {
		this.stepSize = stepSize;
	}

	public int getSpeedTime() {
		return speedTime;
	}

	public void setSpeedTime(int speedtime) {
		this.speedTime = speedtime;
	}

	public int getWaitTime() {
		return waitTime;
	}

	public void setWaitTime(int waittime) {
		this.waitTime = waittime;
	}

	// Restart the animation from the initial (waiting) position whenever the
	// supplied text changes.
	@Override
	protected void updatedText() {
		state = State.WAITING;
		moveDifference = 0;
		lastTime = 0;
	}

	@Override
	public void render(MatrixStack matrixStack, int mouseX, int mouseY, float partialTicks) {
		final Minecraft minecraft = Minecraft.getInstance();
		final MainWindow window = minecraft.getMainWindow();
		final Matrix4fExtended matrix = new Matrix4fExtended(matrixStack.getLast().getMatrix());
		final double scaleFactor = window.getGuiScaleFactor();
		// Transform the logical (x, y) into window space so the scissor box
		// matches what is actually drawn under the current matrix stack.
		final Vector4f vectorXY = new Vector4f(x, y, 0, 1);
		vectorXY.transform(matrix);
		// Cannot use transform here, because we only care about the scaling. M00 and M11 should have the right scaling
		final Vector4f vectorWH = new Vector4f(width * matrix.getM00(), (fontRenderer.FONT_HEIGHT + 1) * scale * matrix.getM11(), 0, 1);
		final int nativeX = MathHelper.ceil(vectorXY.getX() * scaleFactor);
		final int nativeY = MathHelper.ceil(vectorXY.getY() * scaleFactor);
		final int nativeWidth = MathHelper.ceil(vectorWH.getX() * scaleFactor);
		final int nativeHeight = MathHelper.ceil(vectorWH.getY() * scaleFactor);
		// Clip to the text window; GL scissor coordinates originate bottom-left,
		// hence the window-height flip for Y.
		RenderUtil.enableScissor(nativeX, window.getHeight() - (nativeY + nativeHeight), nativeWidth, nativeHeight);
		// Uncomment to test scissor
		// matrixStack.push();
		// matrixStack.getLast().getMatrix().setIdentity();
		// AbstractGui.fill(matrixStack, 0, 0, window.getScaledWidth(), window.getScaledHeight(), 0x8F00FF00);
		// matrixStack.pop();
		setText(textSupplier.get());
		renderFont(matrixStack, fontRenderer, getMovingX(x), y + 2 * scale);
		RenderUtil.disableScissor();
	}

	/**
	 * Computes the x position for the current frame and advances the scroll
	 * state machine. Returns {@code x} unchanged when the text fits within
	 * {@link #width}.
	 *
	 * @param x the unscrolled x position
	 * @return x shifted by the current scroll offset
	 */
	protected float getMovingX(float x) {
		final float textWidth = getTextWidth();
		if (width < textWidth) {
			// maxMove is negative: the furthest-left offset the text can reach.
			final float maxMove = width - textWidth;
			if (lastTime == 0) {
				lastTime = System.currentTimeMillis();
			}
			if (state == State.WAITING) {
				if (hasWaitTimePassed()) {
					// Leave the end we are resting at: offset 0 -> scroll left,
					// offset at maxMove -> scroll right.
					state = moveDifference >= 0 ? State.LEFT : State.RIGHT;
					lastTime = 0;
				}
			} else {
				if (hasSpeedTimePassed()) {
					// While within range keep stepping; past the far end, pause.
					if (state == State.LEFT ? moveDifference >= maxMove : moveDifference <= 0) {
						moveDifference += state == State.LEFT ? -stepSize : +stepSize;
					} else {
						state = State.WAITING;
					}
					lastTime = 0;
				}
			}
			return x + moveDifference;
		}
		return x;
	}

	// True once waitTime ms have elapsed since lastTime was armed.
	protected boolean hasWaitTimePassed() {
		return System.currentTimeMillis() - waitTime >= lastTime;
	}

	// True once speedTime ms have elapsed since lastTime was armed.
	protected boolean hasSpeedTimePassed() {
		return System.currentTimeMillis() - speedTime >= lastTime;
	}

	/** Animation states: resting at an end, or moving left/right. */
	private enum State {
		WAITING,
		LEFT,
		RIGHT;
	}
}
|
package io.scalecube.gateway.websocket.message;
import static com.fasterxml.jackson.core.JsonToken.VALUE_NULL;
import static io.scalecube.gateway.websocket.message.GatewayMessage.DATA_FIELD;
import static io.scalecube.gateway.websocket.message.GatewayMessage.SIGNAL_FIELD;
import static io.scalecube.gateway.websocket.message.GatewayMessage.STREAM_ID_FIELD;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.MappingJsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufInputStream;
import io.netty.buffer.ByteBufOutputStream;
import io.scalecube.gateway.ReferenceCountUtil;
import io.scalecube.services.exceptions.MessageCodecException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.Map.Entry;
import java.util.Optional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * JSON codec for {@link GatewayMessage}: encodes headers plus an optional raw
 * data payload into a Netty {@link ByteBuf}, and decodes the reverse. The data
 * field is treated as opaque bytes — on encode a ByteBuf payload is spliced in
 * verbatim after the {@code "data":} field name; on decode the data span is
 * copied out of the source buffer without being parsed into objects.
 */
public class GatewayMessageCodec {

  private static final Logger LOGGER = LoggerFactory.getLogger(GatewayMessageCodec.class);

  // Shared, thread-safe Jackson plumbing configured once (see objectMapper()).
  private static final ObjectMapper objectMapper = objectMapper();
  private static final MappingJsonFactory jsonFactory = new MappingJsonFactory(objectMapper);

  /**
   * Encode given {@code message} to given {@code byteBuf}.
   *
   * @param message - input message to be encoded.
   * @throws MessageCodecException in case of issues during encoding.
   */
  public ByteBuf encode(GatewayMessage message) throws MessageCodecException {
    ByteBuf byteBuf = ByteBufAllocator.DEFAULT.buffer();
    try (JsonGenerator generator =
        jsonFactory.createGenerator(
            (OutputStream) new ByteBufOutputStream(byteBuf), JsonEncoding.UTF8)) {
      generator.writeStartObject();
      // headers: stream id and signal are numeric on the wire, everything else
      // is written as a string.
      for (Entry<String, String> header : message.headers().entrySet()) {
        String fieldName = header.getKey();
        String value = header.getValue();
        if (STREAM_ID_FIELD.equals(fieldName) || SIGNAL_FIELD.equals(fieldName)) {
          generator.writeNumberField(fieldName, Long.parseLong(value));
        } else {
          generator.writeStringField(fieldName, value);
        }
      }
      // data: a ByteBuf payload is assumed to already contain valid JSON and is
      // copied in raw (generator flushed first so bytes land in order); any
      // other payload type is serialized by Jackson.
      Object data = message.data();
      if (data != null) {
        if (data instanceof ByteBuf) {
          ByteBuf dataBin = (ByteBuf) data;
          if (dataBin.isReadable()) {
            try {
              generator.writeFieldName(DATA_FIELD);
              generator.writeRaw(":");
              generator.flush();
              byteBuf.writeBytes(dataBin);
            } finally {
              // The payload buffer is consumed here; release our reference.
              ReferenceCountUtil.safestRelease(dataBin);
            }
          }
        } else {
          generator.writeObjectField(DATA_FIELD, data);
        }
      }
      generator.writeEndObject();
    } catch (Throwable ex) {
      // On failure release both the half-written output buffer and any
      // payload buffer still attached to the message to avoid leaks.
      ReferenceCountUtil.safestRelease(byteBuf);
      Optional.ofNullable(message.data()).ifPresent(ReferenceCountUtil::safestRelease);
      LOGGER.error("Failed to encode message: {}", message, ex);
      throw new MessageCodecException("Failed to encode message", ex);
    }
    return byteBuf;
  }

  /**
   * Decodes {@link GatewayMessage} from given {@code byteBuf}.
   *
   * @param byteBuf - contains raw {@link GatewayMessage} to be decoded.
   * @return Decoded {@link GatewayMessage}.
   * @throws MessageCodecException - in case of issues during deserialization.
   */
  public GatewayMessage decode(ByteBuf byteBuf) throws MessageCodecException {
    // slice() keeps the source reader index untouched; the stream owns and
    // releases the slice on close (releaseOnClose = true).
    try (InputStream stream = new ByteBufInputStream(byteBuf.slice(), true)) {
      // NOTE(review): the JsonParser itself is never closed; closing the
      // underlying stream presumably suffices here — confirm against the
      // Jackson factory's stream-ownership settings.
      JsonParser jp = jsonFactory.createParser(stream);
      GatewayMessage.Builder result = GatewayMessage.builder();
      JsonToken current = jp.nextToken();
      if (current != JsonToken.START_OBJECT) {
        LOGGER.error("Root should be object: {}", byteBuf.toString(Charset.defaultCharset()));
        throw new MessageCodecException("Root should be object", null);
      }
      // Byte offsets delimiting the raw "data" value within the buffer.
      long dataStart = 0;
      long dataEnd = 0;
      while ((jp.nextToken()) != JsonToken.END_OBJECT) {
        String fieldName = jp.getCurrentName();
        current = jp.nextToken();
        if (current == VALUE_NULL) {
          continue;
        }
        if (fieldName.equals(DATA_FIELD)) {
          // Record the data span instead of materializing it: scalars are
          // consumed (string values must be read to advance), structures are
          // skipped wholesale.
          dataStart = jp.getTokenLocation().getByteOffset();
          if (current.isScalarValue()) {
            if (!current.isNumeric() && !current.isBoolean()) {
              jp.getValueAsString();
            }
          } else if (current.isStructStart()) {
            jp.skipChildren();
          }
          dataEnd = jp.getCurrentLocation().getByteOffset();
        } else {
          result.header(fieldName, jp.getValueAsString());
        }
      }
      // Copy the raw data bytes out so the result does not reference byteBuf.
      if (dataEnd > dataStart) {
        result.data(byteBuf.copy((int) dataStart, (int) (dataEnd - dataStart)));
      }
      return result.build();
    } catch (Throwable ex) {
      LOGGER.error("Failed to decode message: {}", byteBuf.toString(Charset.defaultCharset()), ex);
      throw new MessageCodecException("Failed to decode message", ex);
    }
  }

  // Builds the shared mapper: lenient on unknown input, field-level access,
  // nulls omitted, ISO dates via the JavaTimeModule.
  private static ObjectMapper objectMapper() {
    ObjectMapper mapper = new ObjectMapper();
    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
    mapper.configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false);
    mapper.configure(DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL, true);
    mapper.configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false);
    mapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);
    mapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
    mapper.configure(SerializationFeature.WRITE_ENUMS_USING_TO_STRING, true);
    mapper.registerModule(new JavaTimeModule());
    return mapper;
  }
}
|
package net.malisis.core.client.gui.component.interaction;
import static com.google.common.base.Preconditions.*;
import org.apache.commons.lang3.StringUtils;
import com.google.common.base.Converter;
import com.mojang.realmsclient.gui.ChatFormatting;
import net.malisis.core.client.gui.GuiRenderer;
import net.malisis.core.client.gui.MalisisGui;
import net.malisis.core.client.gui.component.IGuiText;
import net.malisis.core.client.gui.component.UIComponent;
import net.malisis.core.client.gui.element.GuiShape;
import net.malisis.core.client.gui.element.SimpleGuiShape;
import net.malisis.core.client.gui.element.XResizableGuiShape;
import net.malisis.core.client.gui.event.ComponentEvent;
import net.malisis.core.renderer.font.FontOptions;
import net.malisis.core.renderer.font.MalisisFont;
import net.malisis.core.renderer.icon.provider.GuiIconProvider;
import net.malisis.core.util.MouseButton;
import net.malisis.core.util.Silenced;
import net.minecraft.util.math.MathHelper;
/**
* @author Ordinastie
*
*/
public class UISlider<T> extends UIComponent<UISlider<T>> implements IGuiText<UISlider<T>>
{
	/** Width in pixels of the draggable slider knob. */
	public static int SLIDER_WIDTH = 8;

	/** Icon used to draw the slider knob. */
	protected GuiIconProvider sliderIcon;

	/** The {@link MalisisFont} to use for this {@link UISlider}. */
	protected MalisisFont font = MalisisFont.minecraftFont;
	/** The {@link FontOptions} to use for this {@link UISlider}. */
	protected FontOptions fontOptions = FontOptions.builder().color(0xFFFFFF).shadow().build();
	/** The {@link FontOptions} to use for this {@link UISlider} when hovered. */
	protected FontOptions hoveredFontOptions = FontOptions.builder().color(0xFFFFA0).shadow().build();

	/** Text to display over the slider. Used as a format string with the current value. */
	protected String text;
	/** Current value. */
	protected T value;
	/** Position offset of the slider, between 0 and 1. */
	protected float offset;
	/** Amount of offset scrolled by when using the scroll wheel. */
	protected float scrollStep = 0.05F;
	/** Converter from float (0-1 offset) to the value. */
	protected Converter<Float, T> converter;

	/** Shape used to draw the slider knob (the track is drawn via the inherited shape). */
	protected GuiShape sliderShape;

	/**
	 * Creates a slider of the given width whose value is derived from the 0-1
	 * knob offset through {@code converter}. The initial value is the
	 * conversion of offset 0.
	 *
	 * @param gui the parent gui
	 * @param width total width of the slider
	 * @param converter converter between knob offset and value (must not be null)
	 * @param text format string displayed over the slider ({@code value} is the argument)
	 */
	public UISlider(MalisisGui gui, int width, Converter<Float, T> converter, String text)
	{
		super(gui);
		this.text = text;
		this.converter = checkNotNull(converter);
		this.value = converter.convert(0F);

		setSize(width, 20);

		shape = new XResizableGuiShape();
		sliderShape = new SimpleGuiShape();
		sliderShape.setSize(8, 20);
		sliderShape.storeState();

		iconProvider = new GuiIconProvider(gui.getGuiTexture().getXResizableIcon(0, 0, 200, 20, 5));
		sliderIcon = new GuiIconProvider(gui.getGuiTexture().getIcon(227, 46, 8, 20));
	}

	//	public UISlider(MalisisGui gui, int width, float min, float max)
	//		this(gui, width, null, null);

	//#region Getters/Setters
	@Override
	public MalisisFont getFont()
	{
		return font;
	}

	@Override
	public UISlider<T> setFont(MalisisFont font)
	{
		this.font = font;
		return this;
	}

	@Override
	public FontOptions getFontOptions()
	{
		return fontOptions;
	}

	@Override
	public UISlider<T> setFontOptions(FontOptions fro)
	{
		this.fontOptions = fro;
		return this;
	}

	/**
	 * Sets the value for this {@link UISlider}.
	 *
	 * @param value the value
	 * @return this UI slider
	 */
	public UISlider<T> setValue(T value)
	{
		// No-op when unchanged, and cancellable through the ValueChange event.
		if (this.value == value)
			return this;
		if (!fireEvent(new ComponentEvent.ValueChange<>(this, this.value, value)))
			return this;

		this.value = value;
		// Keep the knob position in sync with the new value.
		this.offset = MathHelper.clamp(converter.reverse().convert(value), 0, 1);
		return this;
	}

	/**
	 * Gets the value for this {@link UISlider}.
	 *
	 * @return the value
	 */
	public T getValue()
	{
		return value;
	}

	/**
	 * Sets the amount of offset to scroll with the wheel.
	 *
	 * @param scrollStep the scroll step
	 * @return the UI slider
	 */
	public UISlider<T> setScrollStep(float scrollStep)
	{
		this.scrollStep = scrollStep;
		return this;
	}

	//#end Getters/Setters

	/**
	 * Moves the knob to the clicked position.
	 */
	@Override
	public boolean onClick(int x, int y)
	{
		slideTo(x);
		return true;
	}

	/**
	 * Nudges the knob by {@link #scrollStep} per wheel notch.
	 */
	@Override
	public boolean onScrollWheel(int x, int y, int delta)
	{
		slideTo(offset + delta * scrollStep);
		return true;
	}

	/**
	 * Drags the knob to follow the mouse.
	 */
	@Override
	public boolean onDrag(int lastX, int lastY, int x, int y, MouseButton button)
	{
		slideTo(x);
		return true;
	}

	/**
	 * Slides the slider to the specified pixel position.<br>
	 *
	 * @param x the x
	 */
	public void slideTo(int x)
	{
		// Convert the absolute pixel position to a 0-1 offset, centering the
		// knob under the cursor.
		int l = width - SLIDER_WIDTH;
		int pos = relativeX(x);
		pos = MathHelper.clamp(pos - SLIDER_WIDTH / 2, 0, l);
		slideTo((float) pos / l);
	}

	/**
	 * Slides the slider to the specified offset between 0 and 1.<br>
	 * Sets the value to the conversion of that offset through the converter.
	 *
	 * @param offset the offset
	 */
	public void slideTo(float offset)
	{
		if (isDisabled())
			return;

		setValue(converter.convert(MathHelper.clamp(offset, 0, 1)));
	}

	/**
	 * Draws the slider track.
	 */
	@Override
	public void drawBackground(GuiRenderer renderer, int mouseX, int mouseY, float partialTick)
	{
		renderer.drawShape(shape, rp);
	}

	/**
	 * Draws the knob at its current offset, then the (optional) formatted text
	 * centered over the slider.
	 */
	@Override
	public void drawForeground(GuiRenderer renderer, int mouseX, int mouseY, float partialTick)
	{
		zIndex = 0;
		// Knob x position in pixels for the current 0-1 offset.
		int ox = (int) (offset * (width - SLIDER_WIDTH));

		sliderShape.resetState();
		sliderShape.setPosition(ox, 0);

		rp.iconProvider.set(sliderIcon);
		renderer.drawShape(sliderShape, rp);
		renderer.next();

		//zIndex = 1;
		if (!StringUtils.isEmpty(text))
		{
			// text is a format string; a bad format yields a visible error label
			// instead of throwing during rendering.
			String str = Silenced.get(() -> String.format(text, value));
			if (str == null)
				str = ChatFormatting.ITALIC + "Format error";
			int x = (int) ((width - font.getStringWidth(str, fontOptions)) / 2);
			int y = 6;
			renderer.drawText(font, str, x, y, 0, isHovered() ? hoveredFontOptions : fontOptions);
		}
	}
}
|
package net.nunnerycode.bukkit.mythicdrops.items;
import net.nunnerycode.bukkit.mythicdrops.MythicDropsPlugin;
import net.nunnerycode.bukkit.mythicdrops.api.enchantments.MythicEnchantment;
import net.nunnerycode.bukkit.mythicdrops.api.items.ItemGenerationReason;
import net.nunnerycode.bukkit.mythicdrops.api.items.MythicItemStack;
import net.nunnerycode.bukkit.mythicdrops.api.items.NonrepairableItemStack;
import net.nunnerycode.bukkit.mythicdrops.api.items.builders.DropBuilder;
import net.nunnerycode.bukkit.mythicdrops.api.names.NameType;
import net.nunnerycode.bukkit.mythicdrops.api.tiers.Tier;
import net.nunnerycode.bukkit.mythicdrops.events.RandomItemGenerationEvent;
import net.nunnerycode.bukkit.mythicdrops.names.NameMap;
import net.nunnerycode.bukkit.mythicdrops.tiers.TierMap;
import net.nunnerycode.bukkit.mythicdrops.utils.ItemStackUtil;
import net.nunnerycode.bukkit.mythicdrops.utils.ItemUtil;
import net.nunnerycode.bukkit.mythicdrops.utils.RandomRangeUtil;
import org.apache.commons.lang.Validate;
import org.apache.commons.lang.math.NumberUtils;
import org.apache.commons.lang.math.RandomUtils;
import org.apache.commons.lang3.text.WordUtils;
import org.bukkit.Bukkit;
import org.bukkit.Color;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.enchantments.Enchantment;
import org.bukkit.enchantments.EnchantmentWrapper;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.LeatherArmorMeta;
import org.bukkit.material.MaterialData;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public final class MythicDropBuilder implements DropBuilder {
private Tier tier;
private MaterialData materialData;
private ItemGenerationReason itemGenerationReason;
private World world;
private boolean useDurability;
private boolean callEvent;
	/**
	 * Creates a builder with default settings: no preselected tier (rolled at
	 * build time), {@link ItemGenerationReason#DEFAULT}, the server's first
	 * world, durability randomization off and event firing on.
	 */
	public MythicDropBuilder() {
		tier = null;
		itemGenerationReason = ItemGenerationReason.DEFAULT;
		world = Bukkit.getServer().getWorlds().get(0);
		useDurability = false;
		callEvent = true;
	}
	/**
	 * Sets whether a {@link RandomItemGenerationEvent} is fired (and may cancel
	 * or replace the item) when {@link #build()} runs.
	 *
	 * @param b true to fire the event
	 * @return this builder
	 */
	public DropBuilder withCallEvent(boolean b) {
		this.callEvent = b;
		return this;
	}
	/**
	 * Sets the tier to build the item from; null means a random tier is rolled
	 * at build time.
	 *
	 * @param tier tier to use
	 * @return this builder
	 */
	@Override
	public DropBuilder withTier(Tier tier) {
		this.tier = tier;
		return this;
	}
	/**
	 * Sets the tier by name, resolved through the {@link TierMap} registry.
	 *
	 * @param tierName name of the tier (null result if unknown)
	 * @return this builder
	 */
	@Override
	public DropBuilder withTier(String tierName) {
		this.tier = TierMap.getInstance().get(tierName);
		return this;
	}
	/**
	 * Sets the material of the generated item; null means a random material
	 * allowed by the tier is chosen at build time.
	 *
	 * @param materialData material to use
	 * @return this builder
	 */
	@Override
	public DropBuilder withMaterialData(MaterialData materialData) {
		this.materialData = materialData;
		return this;
	}
@Override
public DropBuilder withMaterialData(String materialDataString) {
MaterialData matData = null;
if (materialDataString.contains(";")) {
String[] split = materialDataString.split(";");
matData =
new MaterialData(NumberUtils.toInt(split[0], 0), (byte) NumberUtils.toInt(split[1], 0));
} else {
matData = new MaterialData(NumberUtils.toInt(materialDataString, 0));
}
this.materialData = matData;
return this;
}
	/**
	 * Sets the reason passed to the generation event when the item is built.
	 *
	 * @param reason generation reason
	 * @return this builder
	 */
	@Override
	public DropBuilder withItemGenerationReason(ItemGenerationReason reason) {
		this.itemGenerationReason = reason;
		return this;
	}
	/**
	 * Sets the world whose tier chances are used when rolling a random tier.
	 *
	 * @param world world to use
	 * @return this builder
	 */
	@Override
	public DropBuilder inWorld(World world) {
		this.world = world;
		return this;
	}
	/**
	 * Sets the world by name.
	 *
	 * @param worldName name of the world (null world if unknown)
	 * @return this builder
	 */
	@Override
	public DropBuilder inWorld(String worldName) {
		this.world = Bukkit.getWorld(worldName);
		return this;
	}
	/**
	 * Sets whether the built item receives a randomized durability within the
	 * tier's configured percentage range.
	 *
	 * @param b true to randomize durability
	 * @return this builder
	 */
	@Override
	public DropBuilder useDurability(boolean b) {
		this.useDurability = b;
		return this;
	}
	/**
	 * Builds the item: resolves tier and material (randomly when unset),
	 * applies base and bonus enchantments, optional durability, generated name
	 * and lore, and optionally fires {@link RandomItemGenerationEvent}.
	 *
	 * @return the generated item, the event's (possibly replaced) item, or
	 *         null when no tier can be resolved or the event is cancelled
	 */
	@Override
	public ItemStack build() {
		World w = world != null ? world : Bukkit.getWorlds().get(0);
		// Resolve the tier: explicit > world-weighted random > "default" world
		// random; give up with null when even that fails.
		Tier t = (tier != null) ? tier : TierMap.getInstance().getRandomWithChance(w.getName());
		if (t == null) {
			t = TierMap.getInstance().getRandomWithChance("default");
			if (t == null) {
				return null;
			}
		}
		// NOTE(review): the resolved tier is written back into the builder, so a
		// reused builder keeps the tier rolled by its first build() — confirm
		// this is intended.
		tier = t;
		MaterialData
				md =
				(materialData != null) ? materialData : ItemUtil.getRandomMaterialDataFromCollection
						(ItemUtil.getMaterialDatasFromTier(t));
		NonrepairableItemStack nis = new NonrepairableItemStack(md.getItemType(), 1, (short) 0, "");
		ItemMeta im = nis.getItemMeta();
		Map<Enchantment, Integer> baseEnchantmentMap = getBaseEnchantments(nis, t);
		Map<Enchantment, Integer> bonusEnchantmentMap = getBonusEnchantments(nis, t);
		for (Map.Entry<Enchantment, Integer> baseEnch : baseEnchantmentMap.entrySet()) {
			im.addEnchant(baseEnch.getKey(), baseEnch.getValue(), true);
		}
		for (Map.Entry<Enchantment, Integer> bonusEnch : bonusEnchantmentMap.entrySet()) {
			im.addEnchant(bonusEnch.getKey(), bonusEnch.getValue(), true);
		}
		if (useDurability) {
			nis.setDurability(
					ItemStackUtil.getDurabilityForMaterial(nis.getType(), t.getMinimumDurabilityPercentage
							(), t.getMaximumDurabilityPercentage()));
		}
		String name = generateName(nis);
		List<String> lore = generateLore(nis);
		im.setDisplayName(name);
		im.setLore(lore);
		// Leather armor gets a random dye color.
		if (nis.getItemMeta() instanceof LeatherArmorMeta) {
			((LeatherArmorMeta) im).setColor(Color.fromRGB(RandomUtils.nextInt(255),
					RandomUtils.nextInt(255),
					RandomUtils.nextInt(255)));
		}
		nis.setItemMeta(im);
		// Let plugins veto or replace the generated item.
		if (callEvent) {
			RandomItemGenerationEvent rige = new RandomItemGenerationEvent(t, nis, itemGenerationReason);
			Bukkit.getPluginManager().callEvent(rige);
			if (rige.isCancelled()) {
				return null;
			}
			return rige.getItemStack();
		}
		return nis;
	}
private Map<Enchantment, Integer> getBonusEnchantments(MythicItemStack is, Tier t) {
Validate.notNull(is, "MythicItemStack cannot be null");
Validate.notNull(t, "Tier cannot be null");
if (t.getBonusEnchantments().isEmpty()) {
return new HashMap<>();
}
Map<Enchantment, Integer> map = new HashMap<>();
int added = 0;
int attempts = 0;
int range = (int) RandomRangeUtil.randomRangeDoubleInclusive(t.getMinimumBonusEnchantments(),
t.getMaximumBonusEnchantments());
MythicEnchantment[]
array =
t.getBonusEnchantments().toArray(new MythicEnchantment[t.getBonusEnchantments()
.size()]);
while (added < range && attempts < 10) {
MythicEnchantment chosenEnch = array[RandomUtils.nextInt(array.length)];
if (chosenEnch == null || chosenEnch.getEnchantment() == null) {
attempts++;
continue;
}
Enchantment e = chosenEnch.getEnchantment();
int randLevel = (int) RandomRangeUtil.randomRangeLongInclusive(chosenEnch.getMinimumLevel(),
chosenEnch.getMaximumLevel());
if (is.containsEnchantment(e)) {
randLevel += is.getEnchantmentLevel(e);
}
if (t.isSafeBonusEnchantments() && e.canEnchantItem(is)) {
if (t.isAllowHighBonusEnchantments()) {
map.put(e, randLevel);
} else {
map.put(e, getAcceptableEnchantmentLevel(e, randLevel));
}
} else if (!t.isSafeBonusEnchantments()) {
if (t.isAllowHighBonusEnchantments()) {
map.put(e, randLevel);
} else {
map.put(e, getAcceptableEnchantmentLevel(e, randLevel));
}
} else {
continue;
}
added++;
}
return map;
}
	/**
	 * Computes the item's guaranteed (base) enchantments from the tier. Each
	 * pool entry's level range is clamped to the enchantment's vanilla
	 * start/max levels; when the tier allows "high" levels the clamped range is
	 * used directly, otherwise the rolled level is re-clamped per enchantment.
	 *
	 * @param is item the enchantments will be applied to (used for safety checks)
	 * @param t tier supplying the base pool
	 * @return enchantment-to-level map; empty when the tier has no base pool
	 */
	private Map<Enchantment, Integer> getBaseEnchantments(MythicItemStack is, Tier t) {
		Validate.notNull(is, "MythicItemStack cannot be null");
		Validate.notNull(t, "Tier cannot be null");
		if (t.getBaseEnchantments().isEmpty()) {
			return new HashMap<>();
		}
		Map<Enchantment, Integer> map = new HashMap<>();
		for (MythicEnchantment me : t.getBaseEnchantments()) {
			if (me == null || me.getEnchantment() == null) {
				continue;
			}
			Enchantment e = me.getEnchantment();
			// NOTE(review): if the configured minimum exceeds the vanilla max,
			// minimumLevel can end up above maximumLevel — confirm
			// randomRangeLongInclusive tolerates an inverted range.
			int minimumLevel = Math.max(me.getMinimumLevel(), e.getStartLevel());
			int maximumLevel = Math.min(me.getMaximumLevel(), e.getMaxLevel());
			if (t.isSafeBaseEnchantments() && e.canEnchantItem(is)) {
				if (t.isAllowHighBaseEnchantments()) {
					map.put(e, (int) RandomRangeUtil.randomRangeLongInclusive
							(minimumLevel, maximumLevel));
				} else {
					map.put(e, getAcceptableEnchantmentLevel(e,
							(int) RandomRangeUtil
									.randomRangeLongInclusive(minimumLevel,
											maximumLevel)));
				}
			} else if (!t.isSafeBaseEnchantments()) {
				map.put(e, (int) RandomRangeUtil.randomRangeLongInclusive
						(minimumLevel, maximumLevel));
			}
			// Safe mode with an inapplicable enchantment: silently skipped.
		}
		return map;
	}
private int getAcceptableEnchantmentLevel(Enchantment ench, int level) {
EnchantmentWrapper ew = new EnchantmentWrapper(ench.getId());
return Math.max(Math.min(level, ew.getMaxLevel()), ew.getStartLevel());
}
private List<String> generateLore(ItemStack itemStack) {
List<String> lore = new ArrayList<String>();
if (itemStack == null || tier == null) {
return lore;
}
List<String>
tooltipFormat =
MythicDropsPlugin.getInstance().getConfigSettings().getTooltipFormat();
String minecraftName = getMinecraftMaterialName(itemStack.getData().getItemType());
String mythicName = getMythicMaterialName(itemStack.getData());
String itemType = getItemTypeName(ItemUtil.getItemTypeFromMaterialData(itemStack.getData()));
String
materialType =
getItemTypeName(ItemUtil.getMaterialTypeFromMaterialData(itemStack.getData()));
String tierName = tier.getDisplayName();
ItemMeta itemMeta = itemStack.getItemMeta();
String enchantment = getEnchantmentTypeName(itemMeta);
if (MythicDropsPlugin.getInstance().getConfigSettings().isRandomLoreEnabled()
&& RandomUtils.nextDouble() <
MythicDropsPlugin.getInstance().getConfigSettings().getRandomLoreChance()) {
String generalLoreString = NameMap.getInstance().getRandom(NameType.GENERAL_LORE, "");
String materialLoreString = NameMap.getInstance().getRandom(NameType.MATERIAL_LORE,
itemStack.getType().name()
.toLowerCase());
String
tierLoreString =
NameMap.getInstance().getRandom(NameType.TIER_LORE, tier.getName().toLowerCase());
String enchantmentLoreString = NameMap.getInstance().getRandom(NameType.ENCHANTMENT_LORE,
enchantment != null
? enchantment.toLowerCase()
: "");
List<String> generalLore = null;
if (generalLoreString != null && !generalLoreString.isEmpty()) {
generalLore = Arrays.asList(generalLoreString.replace('&',
'\u00A7').replace("\u00A7\u00A7", "&")
.split("/n"));
}
List<String> materialLore = null;
if (materialLoreString != null && !materialLoreString.isEmpty()) {
materialLore =
Arrays.asList(materialLoreString.replace('&', '\u00A7').replace("\u00A7\u00A7",
"&").split("/n"));
}
List<String> tierLore = null;
if (tierLoreString != null && !tierLoreString.isEmpty()) {
tierLore = Arrays.asList(tierLoreString.replace('&', '\u00A7').replace("\u00A7\u00A7",
"&").split("/n"));
}
List<String> enchantmentLore = null;
if (enchantmentLoreString != null && !enchantmentLoreString.isEmpty()) {
enchantmentLore = Arrays.asList(enchantmentLoreString.replace('&',
'\u00A7')
.replace("\u00A7\u00A7", "&").split("/n"));
}
if (generalLore != null && !generalLore.isEmpty()) {
lore.addAll(generalLore);
}
if (materialLore != null && !materialLore.isEmpty()) {
lore.addAll(materialLore);
}
if (tierLore != null && !tierLore.isEmpty()) {
lore.addAll(tierLore);
}
if (enchantmentLore != null && !enchantmentLore.isEmpty()) {
lore.addAll(enchantmentLore);
}
}
for (String s : tooltipFormat) {
String line = s;
line = line.replace("%basematerial%", minecraftName != null ? minecraftName : "");
line = line.replace("%mythicmaterial%", mythicName != null ? mythicName : "");
line = line.replace("%itemtype%", itemType != null ? itemType : "");
line = line.replace("%materialtype%", materialType != null ? materialType : "");
line = line.replace("%tiername%", tierName != null ? tierName : "");
line = line.replace("%enchantment%", enchantment != null ? enchantment : "");
line = line.replace("%tiercolor%", tier.getDisplayColor() + "");
line = line.replace('&', '\u00A7').replace("\u00A7\u00A7", "&");
lore.add(line);
}
for (String s : tier.getBaseLore()) {
String line = s;
line = line.replace("%basematerial%", minecraftName != null ? minecraftName : "");
line = line.replace("%mythicmaterial%", mythicName != null ? mythicName : "");
line = line.replace("%itemtype%", itemType != null ? itemType : "");
line = line.replace("%materialtype%", materialType != null ? materialType : "");
line = line.replace("%tiername%", tierName != null ? tierName : "");
line = line.replace("%enchantment%", enchantment != null ? enchantment : "");
line = line.replace("%tiercolor%", tier.getDisplayColor() + "");
line = line.replace('&', '\u00A7').replace("\u00A7\u00A7", "&");
String[] strings = line.split("/n");
lore.addAll(Arrays.asList(strings));
}
int numOfBonusLore = (int) RandomRangeUtil.randomRangeLongInclusive(tier.getMinimumBonusLore(),
tier.getMaximumBonusLore());
List<String> chosenLore = new ArrayList<>();
for (int i = 0; i < numOfBonusLore; i++) {
if (tier.getBonusLore() == null || tier.getBonusLore().isEmpty() || chosenLore.size() == tier
.getBonusLore().size()) {
continue;
}
// choose a random String out of the tier's bonus lore
String s = tier.getBonusLore().get(RandomUtils.nextInt(tier.getBonusLore().size()));
if (chosenLore.contains(s)) {
i
continue;
}
chosenLore.add(s);
// split on the next line /n
String[] strings = s.replace('&', '\u00A7').replace("\u00A7\u00A7", "&").split("/n");
// add to lore by wrapping in Arrays.asList(Object...)
lore.addAll(Arrays.asList(strings));
}
if (MythicDropsPlugin.getInstance().getSockettingSettings().isEnabled()
&& RandomUtils.nextDouble() < tier
.getChanceToHaveSockets()) {
int numberOfSockets = (int) RandomRangeUtil.randomRangeLongInclusive(tier.getMinimumSockets(),
tier.getMaximumSockets());
for (int i = 0; i < numberOfSockets; i++) {
String line = MythicDropsPlugin.getInstance().getSockettingSettings().getSockettedItemString();
line = line.replace("%basematerial%", minecraftName != null ? minecraftName : "");
line = line.replace("%mythicmaterial%", mythicName != null ? mythicName : "");
line = line.replace("%itemtype%", itemType != null ? itemType : "");
line = line.replace("%materialtype%", materialType != null ? materialType : "");
line = line.replace("%tiername%", tierName != null ? tierName : "");
line = line.replace("%enchantment%", enchantment != null ? enchantment : "");
line = line.replace("%tiercolor%", tier.getDisplayColor() + "");
line = line.replace('&', '\u00A7').replace("\u00A7\u00A7", "&");
lore.add(line);
}
if (numberOfSockets > 0) {
for (String s : MythicDropsPlugin.getInstance().getSockettingSettings()
.getSockettedItemLore()) {
String line = s;
line = line.replace("%basematerial%", minecraftName != null ? minecraftName : "");
line = line.replace("%mythicmaterial%", mythicName != null ? mythicName : "");
line = line.replace("%itemtype%", itemType != null ? itemType : "");
line = line.replace("%materialtype%", materialType != null ? materialType : "");
line = line.replace("%tiername%", tierName != null ? tierName : "");
line = line.replace("%enchantment%", enchantment != null ? enchantment : "");
line = line.replace("%tiercolor%", tier.getDisplayColor() + "");
line = line.replace('&', '\u00A7').replace("\u00A7\u00A7", "&");
lore.add(line);
}
}
}
return lore;
}
/**
 * Describes the item by the display name of its strongest enchantment,
 * falling back to the configured "Ordinary" label (or the literal
 * "Ordinary") when no enchantment or translation exists.
 *
 * @param itemMeta item meta to inspect (may carry enchantments)
 * @return display name for the highest enchantment, never null
 */
private String getEnchantmentTypeName(ItemMeta itemMeta) {
    Enchantment highest = ItemStackUtil.getHighestEnchantment(itemMeta);
    if (highest == null) {
        return MythicDropsPlugin.getInstance().getConfigSettings()
            .getFormattedLanguageString("displayNames.Ordinary");
    }
    String localized = MythicDropsPlugin.getInstance().getConfigSettings()
        .getFormattedLanguageString("displayNames." + highest.getName());
    if (localized == null) {
        return "Ordinary";
    }
    return localized;
}
/**
 * Resolves the configured display name for a material, looked up first by
 * "id;data", then by the plain id when the data value is zero, and finally
 * falling back to the pretty-printed Minecraft name.
 *
 * @param matData material data to resolve
 * @return capitalised display name, never null
 */
private String getMythicMaterialName(MaterialData matData) {
    String idWithData =
        String.format("%s;%s", String.valueOf(matData.getItemTypeId()),
                      String.valueOf(matData.getData()));
    String idOnly;
    if (matData.getData() == (byte) 0) {
        idOnly = String.valueOf(matData.getItemTypeId());
    } else {
        idOnly = idWithData;
    }
    // A lookup "fails" when the result is null or just echoes the key back.
    String name = MythicDropsPlugin.getInstance().getConfigSettings()
        .getFormattedLanguageString("displayNames." + idWithData.toLowerCase());
    if (name == null || name.equals("displayNames." + idWithData.toLowerCase())) {
        name = MythicDropsPlugin.getInstance().getConfigSettings()
            .getFormattedLanguageString("displayNames." + idOnly.toLowerCase());
        if (name == null || name.equals("displayNames." + idOnly.toLowerCase())) {
            name = getMinecraftMaterialName(matData.getItemType());
        }
    }
    return WordUtils.capitalize(name);
}
/**
 * Converts an enum-style material name ("DIAMOND_SWORD") into a friendly,
 * capitalised display name ("Diamond Sword").
 *
 * Fix: the previous implementation detected the last word by comparing
 * values instead of positions, so an earlier word equal to the last word
 * lost its separating space. Words are now joined by index.
 *
 * @param material material to pretty-print; must not be null
 * @return capitalised, space-separated material name
 */
private String getMinecraftMaterialName(Material material) {
    String[] words = material.name().split("_");
    StringBuilder pretty = new StringBuilder();
    for (int i = 0; i < words.length; i++) {
        if (i > 0) {
            pretty.append(' ');
        }
        // capitalizeFully below fixes the casing of every word.
        pretty.append(words[i].toLowerCase());
    }
    return WordUtils.capitalizeFully(pretty.toString());
}
/**
 * Resolves the configured display name for an item type, defaulting to the
 * raw type name when no translation exists.
 *
 * @param itemType raw item type key; may be null
 * @return capitalised display name, or null when the input is null
 */
private String getItemTypeName(String itemType) {
    if (itemType == null) {
        return null;
    }
    String displayName = MythicDropsPlugin.getInstance().getConfigSettings()
        .getFormattedLanguageString("displayNames." + itemType.toLowerCase());
    if (displayName == null) {
        displayName = itemType;
    }
    return WordUtils.capitalizeFully(displayName);
}
/**
 * Classifies the given material data as an item type (e.g. "sword") by
 * matching its "id;data" form, its zero-data-collapsed form and its plain id
 * against the configured item-type id lists. Keys that are material types
 * rather than item types are skipped.
 *
 * Fix: the previous implementation copied the configured map into a fresh
 * HashMap before a purely read-only iteration; the copy was dropped.
 *
 * @param matData material data to classify
 * @return the matching item type key, or null when none matches
 */
private String getItemTypeFromMaterialData(MaterialData matData) {
    String idWithData =
        String.format("%s;%s", String.valueOf(matData.getItemTypeId()),
                      String.valueOf(matData.getData()));
    String idOrData;
    if (matData.getData() == (byte) 0) {
        idOrData = String.valueOf(matData.getItemTypeId());
    } else {
        idOrData = idWithData;
    }
    String idOnly = String.valueOf(matData.getItemTypeId());
    for (Map.Entry<String, List<String>> entry
        : MythicDropsPlugin.getInstance().getConfigSettings().getItemTypesWithIds().entrySet()) {
        List<String> ids = entry.getValue();
        if (ids.contains(idWithData) || ids.contains(idOrData) || ids.contains(idOnly)) {
            if (MythicDropsPlugin.getInstance().getConfigSettings().getMaterialTypes()
                .contains(entry.getKey())) {
                // Material types (e.g. "diamond") are not item types; keep looking.
                continue;
            }
            return entry.getKey();
        }
    }
    return null;
}
/**
 * Generates a display name for the given ItemStack from the configured item
 * display-name format, expanding every supported %token% placeholder and
 * translating '&' colour codes.
 *
 * Fix: the fourteen duplicated contains/replace pairs were collapsed into
 * {@link #substituteNameToken(String, String, String)}, which also expands a
 * null value to the empty string instead of throwing a NullPointerException
 * when its token is present in the format.
 *
 * @param itemStack item to name; must not be null (and the wrapped tier must
 *                  not be null)
 * @return coloured display name, or "Mythic Item" when no format is configured
 */
private String generateName(ItemStack itemStack) {
    Validate.notNull(itemStack, "ItemStack cannot be null");
    Validate.notNull(tier, "Tier cannot be null");
    String format = MythicDropsPlugin.getInstance().getConfigSettings().getItemDisplayNameFormat();
    if (format == null) {
        return "Mythic Item";
    }
    String lowerMaterial = itemStack.getType().name().toLowerCase();
    String lowerTier = tier.getName().toLowerCase();
    Enchantment highestEnch = ItemStackUtil.getHighestEnchantment(itemStack.getItemMeta());
    String lowerEnch = highestEnch != null ? highestEnch.getName().toLowerCase() : "";
    String name = format;
    name = substituteNameToken(name, "%basematerial%",
                               getMinecraftMaterialName(itemStack.getData().getItemType()));
    name = substituteNameToken(name, "%mythicmaterial%",
                               getMythicMaterialName(itemStack.getData()));
    name = substituteNameToken(name, "%generalprefix%",
                               NameMap.getInstance().getRandom(NameType.GENERAL_PREFIX, ""));
    name = substituteNameToken(name, "%generalsuffix%",
                               NameMap.getInstance().getRandom(NameType.GENERAL_SUFFIX, ""));
    name = substituteNameToken(name, "%materialprefix%",
                               NameMap.getInstance().getRandom(NameType.MATERIAL_PREFIX,
                                                               lowerMaterial));
    name = substituteNameToken(name, "%materialsuffix%",
                               NameMap.getInstance().getRandom(NameType.MATERIAL_SUFFIX,
                                                               lowerMaterial));
    name = substituteNameToken(name, "%tierprefix%",
                               NameMap.getInstance().getRandom(NameType.TIER_PREFIX, lowerTier));
    name = substituteNameToken(name, "%tiersuffix%",
                               NameMap.getInstance().getRandom(NameType.TIER_SUFFIX, lowerTier));
    name = substituteNameToken(name, "%itemtype%",
                               ItemUtil.getItemTypeFromMaterialData(itemStack.getData()));
    name = substituteNameToken(name, "%materialtype%",
                               ItemUtil.getMaterialTypeFromMaterialData(itemStack.getData()));
    name = substituteNameToken(name, "%tiername%", tier.getDisplayName());
    name = substituteNameToken(name, "%enchantment%",
                               getEnchantmentTypeName(itemStack.getItemMeta()));
    name = substituteNameToken(name, "%enchantmentprefix%",
                               NameMap.getInstance().getRandom(NameType.ENCHANTMENT_PREFIX,
                                                               lowerEnch));
    name = substituteNameToken(name, "%enchantmentsuffix%",
                               NameMap.getInstance().getRandom(NameType.ENCHANTMENT_SUFFIX,
                                                               lowerEnch));
    // Wrap the name in the tier's colours and translate '&' colour codes
    // ("&&" escapes a literal ampersand).
    return tier.getDisplayColor() + name.replace('&', '\u00A7').replace("\u00A7\u00A7", "&").trim()
           + tier.getIdentificationColor();
}

/**
 * Replaces a placeholder token when it is present in the name. A null value
 * expands to the empty string rather than failing.
 */
private String substituteNameToken(String name, String token, String value) {
    if (!name.contains(token)) {
        return name;
    }
    return name.replace(token, value != null ? value : "");
}
}
|
package nl.tudelft.lifetiles.graph.models.sequence;
import java.util.Set;
import nl.tudelft.lifetiles.graph.view.Mutation;
/**
 * Contains a partial sequence: a run of content shared by a set of source
 * sequences, positioned both relatively (start/end) and absolutely.
 *
 * <p>NOTE(review): this class defines a natural ordering but does not
 * override equals/hashCode, so its ordering is inconsistent with (identity)
 * equals - confirm that instances are never stored in sorted sets/maps that
 * rely on that consistency.
 *
 * @author Rutger van den Berg
 */
public class SequenceSegment implements Comparable<SequenceSegment> {
    /**
     * The content of this segment.
     */
    private SegmentContent contentVar;
    /**
     * The end position for this segment.
     */
    private long endVar;
    /**
     * Contains the sources containing this segment.
     */
    private Set<Sequence> sourcesVar;
    /**
     * The start position for this segment.
     */
    private long startVar;
    /**
     * The absolute start position for this segment.
     */
    private long absStartVar = 0;
    /**
     * The absolute end position for this segment.
     */
    private long absEndVar = Long.MAX_VALUE;
    /**
     * The mutation annotation of this segment.
     */
    private Mutation mutationVar;

    /**
     * @param sources
     *            The sources containing this segment.
     * @param startPosition
     *            The start position for this segment.
     * @param endPosition
     *            The end position for this segment.
     * @param content
     *            The content for this segment.
     */
    public SequenceSegment(final Set<Sequence> sources,
            final long startPosition, final long endPosition,
            final SegmentContent content) {
        sourcesVar = sources;
        startVar = startPosition;
        endVar = endPosition;
        contentVar = content;
    }

    /**
     * @return the content
     */
    public final SegmentContent getContent() {
        return contentVar;
    }

    /**
     * @return the end position
     */
    public final long getEnd() {
        return endVar;
    }

    /**
     * @return the sources
     */
    public final Set<Sequence> getSources() {
        return sourcesVar;
    }

    /**
     * @return the start position
     */
    public final long getStart() {
        return startVar;
    }

    /**
     * @return the absolute start position
     */
    public final long getAbsStart() {
        return absStartVar;
    }

    /**
     * @param absStart
     *            absolute start position of this sequence segment.
     */
    public final void setAbsStart(final long absStart) {
        absStartVar = absStart;
    }

    /**
     * @return the absolute end position
     */
    public final long getAbsEnd() {
        return absEndVar;
    }

    /**
     * @param absEnd
     *            absolute end position of this sequence segment.
     */
    public final void setAbsEnd(final long absEnd) {
        absEndVar = absEnd;
    }

    /**
     * @return mutation annotation of sequence segment.
     */
    public final Mutation getMutation() {
        return mutationVar;
    }

    /**
     * @param mutation
     *            Mutation which is annotated onto the sequence segment.
     */
    public final void setMutation(final Mutation mutation) {
        mutationVar = mutation;
    }

    /**
     * Returns the distance between this sequence segment and another.
     * (non-euclidian distance)
     *
     * @param other
     *            Sequence segment which needs to be compared.
     * @return
     *         Distance between this sequence and other sequence.
     */
    public final long distanceTo(final SequenceSegment other) {
        return other.getStart() - getEnd() - 1;
    }

    /**
     * Compares the start position of this and another sequence segment.
     *
     * @param other
     *            Sequence segment which needs to be compared.
     * @return the compare value of the start positions.
     */
    @Override
    public final int compareTo(final SequenceSegment other) {
        // Long.compare avoids the boxing performed by the previous
        // ((Long) startVar).compareTo(...) implementation.
        return Long.compare(startVar, other.getStart());
    }
}
|
package nl.tudelft.lifetiles.graph.models.sequence;
import java.util.Iterator;
import java.util.Set;
import nl.tudelft.lifetiles.graph.view.Mutation;
/**
 * Contains a partial sequence: a run of content shared by a set of source
 * sequences, positioned both relatively (start/end) and absolutely.
 *
 * @author Rutger van den Berg
 */
public class SequenceSegment implements Comparable<SequenceSegment> {
    /**
     * The content of this segment.
     */
    private SegmentContent contentVar;
    /**
     * The end position for this segment.
     */
    private long endVar;
    /**
     * Contains the sources containing this segment.
     */
    private Set<Sequence> sourcesVar;
    /**
     * The start position for this segment.
     */
    private long startVar;
    /**
     * The absolute start position for this segment.
     */
    private long absStartVar = 0;
    /**
     * The absolute end position for this segment.
     */
    private long absEndVar = Long.MAX_VALUE;
    /**
     * The mutation annotation of this segment. Deliberately excluded from
     * equals() and hashCode().
     */
    private Mutation mutationVar;

    /**
     * @param sources
     *            The sources containing this segment.
     * @param startPosition
     *            The start position for this segment.
     * @param endPosition
     *            The end position for this segment.
     * @param content
     *            The content for this segment.
     */
    public SequenceSegment(final Set<Sequence> sources,
            final long startPosition, final long endPosition,
            final SegmentContent content) {
        sourcesVar = sources;
        startVar = startPosition;
        endVar = endPosition;
        contentVar = content;
    }

    /**
     * @return the content
     */
    public final SegmentContent getContent() {
        return contentVar;
    }

    /**
     * @return the end position
     */
    public final long getEnd() {
        return endVar;
    }

    /**
     * @return the sources
     */
    public final Set<Sequence> getSources() {
        return sourcesVar;
    }

    /**
     * @return the start position
     */
    public final long getStart() {
        return startVar;
    }

    /**
     * @return the absolute start position
     */
    public final long getAbsStart() {
        return absStartVar;
    }

    /**
     * @param absStart
     *            absolute start position of this sequence segment.
     */
    public final void setAbsStart(final long absStart) {
        absStartVar = absStart;
    }

    /**
     * @return the absolute end position
     */
    public final long getAbsEnd() {
        return absEndVar;
    }

    /**
     * @param absEnd
     *            absolute end position of this sequence segment.
     */
    public final void setAbsEnd(final long absEnd) {
        absEndVar = absEnd;
    }

    /**
     * @return mutation annotation of sequence segment.
     */
    public final Mutation getMutation() {
        return mutationVar;
    }

    /**
     * @param mutation
     *            Mutation which is annotated onto the sequence segment.
     */
    public final void setMutation(final Mutation mutation) {
        mutationVar = mutation;
    }

    /**
     * Returns the distance between this sequence segment and another.
     * (non-euclidian distance)
     *
     * @param other
     *            Sequence segment which needs to be compared.
     * @return
     *         Distance between this sequence and other sequence.
     */
    public final long distanceTo(final SequenceSegment other) {
        return other.getStart() - getEnd() - 1;
    }

    /**
     * Compares two segments, first by absolute and relative start positions,
     * then absolute and relative end positions, then content, then sources
     * (size first, then identifiers in iteration order).
     *
     * @param other
     *            Sequence segment which needs to be compared.
     * @return the compare value of the start positions.
     */
    @Override
    public final int compareTo(final SequenceSegment other) {
        int candidateComp = Long.compare(this.getAbsStart(),
                other.getAbsStart());
        if (candidateComp == 0) {
            candidateComp = Long.compare(this.getStart(), other.getStart());
        }
        if (candidateComp == 0) {
            candidateComp = Long.compare(this.getAbsEnd(), other.getAbsEnd());
        }
        if (candidateComp == 0) {
            candidateComp = Long.compare(this.getEnd(), other.getEnd());
        }
        if (candidateComp == 0) {
            candidateComp = this.getContent().toString()
                    .compareTo(other.getContent().toString());
        }
        if (candidateComp == 0) {
            // Integer.compare instead of subtraction: clearer intent.
            candidateComp = Integer.compare(this.getSources().size(),
                    other.getSources().size());
        }
        if (candidateComp == 0) {
            // The sets have equal size here, so both iterators advance in
            // lock step; stop as soon as a difference is found.
            Iterator<Sequence> thisIt = this.getSources().iterator();
            Iterator<Sequence> otherIt = other.getSources().iterator();
            while (thisIt.hasNext() && candidateComp == 0) {
                candidateComp = thisIt.next().getIdentifier()
                        .compareTo(otherIt.next().getIdentifier());
            }
        }
        return candidateComp;
    }

    /*
     * (non-Javadoc)
     * @see java.lang.Object#hashCode()
     */
    @Override
    public final int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (int) (absEndVar ^ (absEndVar >>> 32));
        result = prime * result
                + (int) (absStartVar ^ (absStartVar >>> 32));
        result = prime * result
                + (contentVar == null ? 0 : contentVar.hashCode());
        result = prime * result + (int) (endVar ^ (endVar >>> 32));
        // mutationVar is intentionally NOT hashed: equals() does not compare
        // it, and hashing it broke the equals/hashCode contract (equal
        // segments with different mutations hashed differently).
        result = prime * result
                + (sourcesVar == null ? 0 : sourcesVar.hashCode());
        result = prime * result + (int) (startVar ^ (startVar >>> 32));
        return result;
    }

    /*
     * (non-Javadoc)
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public final boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (!(obj instanceof SequenceSegment)) {
            return false;
        }
        SequenceSegment other = (SequenceSegment) obj;
        if (absEndVar != other.absEndVar) {
            return false;
        }
        if (absStartVar != other.absStartVar) {
            return false;
        }
        if (contentVar == null) {
            if (other.contentVar != null) {
                return false;
            }
        } else if (!contentVar.equals(other.contentVar)) {
            return false;
        }
        if (endVar != other.endVar) {
            return false;
        }
        if (sourcesVar == null) {
            if (other.sourcesVar != null) {
                return false;
            }
        } else if (!sourcesVar.equals(other.sourcesVar)) {
            return false;
        }
        if (startVar != other.startVar) {
            return false;
        }
        // mutationVar is intentionally not compared.
        return true;
    }
}
|
package org.albaross.agents4j.learning.common;
import java.util.List;
import java.util.Map;
import org.albaross.agents4j.core.Agent;
import org.albaross.agents4j.learning.MDPWrapper;
import org.albaross.agents4j.learning.RLEnvironment;
import org.deeplearning4j.rl4j.mdp.MDP;
import org.deeplearning4j.rl4j.space.DiscreteSpace;
import org.deeplearning4j.rl4j.space.ObservationSpace;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A reinforcement-learning environment over a bounded three-dimensional grid
 * ("cube world"). Movement outside the bounds leaves the agent in place.
 */
public class CubeworldEnvironment extends RLEnvironment<Location3D, Direction3D> implements MDPWrapper<Location3D, Direction3D> {

    private static final Logger LOG = LoggerFactory.getLogger(CubeworldEnvironment.class);

    /** Discrete action space: one action index per Direction3D constant. */
    protected final DiscreteSpace actions = new DiscreteSpace(6);

    /** Grid dimensions along x, y and z respectively. */
    protected int width, height, depth;

    public CubeworldEnvironment(List<Agent<Location3D, Direction3D>> agents, Map<Location3D, Double> rewards, Location3D start, Location3D goal,
            int width, int height, int depth) {
        super(agents, rewards, start, goal);
        this.width = width;
        this.height = height;
        this.depth = depth;
    }

    @Override
    protected Location3D[] ara(int size) {
        // Array factory for the generic superclass.
        return new Location3D[size];
    }

    @Override
    public Location3D nextState(Location3D current, Direction3D action) {
        // One step in the requested direction; hitting a wall returns the
        // very same Location3D instance unchanged.
        final int x = current.x;
        final int y = current.y;
        final int z = current.z;
        switch (action) {
        case UP:
            return y + 1 < height ? new Location3D(x, y + 1, z) : current;
        case DOWN:
            return y > 0 ? new Location3D(x, y - 1, z) : current;
        case RIGHT:
            return x + 1 < width ? new Location3D(x + 1, y, z) : current;
        case LEFT:
            return x > 0 ? new Location3D(x - 1, y, z) : current;
        case BACK:
            return z + 1 < depth ? new Location3D(x, y, z + 1) : current;
        case FORTH:
            return z > 0 ? new Location3D(x, y, z - 1) : current;
        default:
            LOG.warn("unknown action");
            return current;
        }
    }

    @Override
    public DiscreteSpace getActionSpace() {
        return actions;
    }

    @Override
    public ObservationSpace<Location3D> getObservationSpace() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public MDP<Location3D, Integer, DiscreteSpace> newInstance() {
        // Fresh environment with identical configuration.
        return new CubeworldEnvironment(agents, rewards, start, goal, width, height, depth);
    }

    @Override
    public RLEnvironment<Location3D, Direction3D> env() {
        return this;
    }

    @Override
    public Direction3D decode(Integer action) {
        // Map a discrete action index back onto its direction.
        return Direction3D.values()[action];
    }
}
|
package org.eclipse.che.ide.flux.liveedit;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.che.api.machine.gwt.client.MachineManager;
import org.eclipse.che.api.machine.gwt.client.MachineServiceClient;
import org.eclipse.che.api.machine.shared.dto.MachineProcessDto;
import org.eclipse.che.api.machine.shared.dto.event.MachineProcessEvent;
import org.eclipse.che.api.promises.client.Operation;
import org.eclipse.che.api.promises.client.OperationException;
import org.eclipse.che.api.promises.client.Promise;
import org.eclipse.che.ide.api.app.AppContext;
import org.eclipse.che.ide.api.extension.Extension;
import org.eclipse.che.ide.extension.machine.client.command.CommandManager;
import org.eclipse.che.ide.extension.machine.client.command.valueproviders.CommandPropertyValueProvider;
import org.eclipse.che.ide.extension.machine.client.command.valueproviders.CommandPropertyValueProviderRegistry;
import org.eclipse.che.ide.jseditor.client.document.Document;
import org.eclipse.che.ide.jseditor.client.document.DocumentHandle;
import org.eclipse.che.ide.jseditor.client.events.DocumentChangeEvent;
import org.eclipse.che.ide.jseditor.client.events.DocumentChangeHandler;
import org.eclipse.che.ide.jseditor.client.events.DocumentReadyEvent;
import org.eclipse.che.ide.jseditor.client.events.DocumentReadyHandler;
import org.eclipse.che.ide.jseditor.client.text.TextPosition;
import org.eclipse.che.ide.project.event.ProjectExplorerLoadedEvent;
import org.eclipse.che.ide.rest.DtoUnmarshallerFactory;
import org.eclipse.che.ide.socketio.Consumer;
import org.eclipse.che.ide.socketio.SocketIOOverlay;
import org.eclipse.che.ide.socketio.SocketIOResources;
import org.eclipse.che.ide.socketio.SocketOverlay;
import org.eclipse.che.ide.util.loging.Log;
import org.eclipse.che.ide.websocket.MessageBus;
import org.eclipse.che.ide.websocket.MessageBusProvider;
import org.eclipse.che.ide.websocket.WebSocketException;
import org.eclipse.che.ide.websocket.events.MessageHandler;
import org.eclipse.che.ide.websocket.rest.SubscriptionHandler;
import org.eclipse.che.ide.websocket.rest.Unmarshallable;
import com.google.gwt.core.client.JsonUtils;
import com.google.gwt.core.client.ScriptInjector;
import com.google.gwt.core.shared.GWT;
import com.google.gwt.user.client.Timer;
import com.google.inject.Inject;
import com.google.web.bindery.event.shared.EventBus;
@Extension(title = "Che Flux extension", version = "1.0.0")
public class CheFluxLiveEditExtension {
private Map<String, Document> liveDocuments = new HashMap<String, Document>();
private SocketOverlay socket;
private boolean isUpdatingModel = false;
private MessageBus messageBus;
private CommandManager commandManager;
private CommandPropertyValueProviderRegistry commandPropertyValueProviderRegistry;
@Inject
public CheFluxLiveEditExtension(final MessageBusProvider messageBusProvider,
final EventBus eventBus,
final MachineManager machineManager,
final MachineServiceClient machineServiceClient,
final DtoUnmarshallerFactory dtoUnmarshallerFactory,
final AppContext appContext,
final CommandManager commandManager,
final CommandPropertyValueProviderRegistry commandPropertyValueProviderRegistry) {
this.commandManager = commandManager;
this.messageBus = messageBusProvider.getMessageBus();
this.commandPropertyValueProviderRegistry = commandPropertyValueProviderRegistry;
injectSocketIO();
eventBus.addHandler(ProjectExplorerLoadedEvent.getType(), new ProjectExplorerLoadedEvent.ProjectExplorerLoadedHandler() {
@Override
public void onProjectsLoaded(ProjectExplorerLoadedEvent event) {
String machineId = appContext.getDevMachineId();
Promise<List<MachineProcessDto>> processesPromise = machineServiceClient.getProcesses(machineId);
processesPromise.then(new Operation<List<MachineProcessDto>>() {
@Override
public void apply(final List<MachineProcessDto> descriptors) throws OperationException {
if (descriptors.isEmpty()) {
return;
}
Log.info(getClass(), "machine processes");
for (MachineProcessDto machineProcessDto : descriptors) {
Log.info(getClass(), " - " + machineProcessDto);
if (connectIfFluxMicroservice(machineProcessDto)) {
break;
}
}
}
});
}
});
String machineId = appContext.getDevMachineId();
final Unmarshallable<MachineProcessEvent> unmarshaller = dtoUnmarshallerFactory.newWSUnmarshaller(MachineProcessEvent.class);
final String processStateChannel = "machine:process:" + machineId;
final MessageHandler handler = new SubscriptionHandler<MachineProcessEvent>(unmarshaller) {
@Override
protected void onMessageReceived(MachineProcessEvent result) {
Log.info(getClass(), "process event" + result);
if (MachineProcessEvent.EventType.STARTED.equals(result.getEventType())) {
final int processId = result.getProcessId();
machineServiceClient.getProcesses(appContext.getDevMachineId()).then(new Operation<List<MachineProcessDto>>() {
@Override
public void apply(List<MachineProcessDto> descriptors) throws OperationException {
if (descriptors.isEmpty()) {
return;
}
for (final MachineProcessDto machineProcessDto : descriptors) {
if (machineProcessDto.getPid() == processId) {
Log.info(getClass(), "Started Process" + machineProcessDto);
new Timer() {
@Override
public void run() {
if (connectIfFluxMicroservice(machineProcessDto)) {
// break;
}
}
}.schedule(8000);
return;
}
}
}
});
}
}
@Override
protected void onErrorReceived(Throwable exception) {
Log.error(getClass(), exception);
}
};
wsSubscribe(processStateChannel, handler);
eventBus.addHandler(DocumentReadyEvent.TYPE, new DocumentReadyHandler() {
@Override
public void onDocumentReady(DocumentReadyEvent event) {
liveDocuments.put(event.getDocument().getFile().getPath(), event.getDocument());
final DocumentHandle documentHandle = event.getDocument().getDocumentHandle();
documentHandle.getDocEventBus().addHandler(DocumentChangeEvent.TYPE, new DocumentChangeHandler() {
@Override
public void onDocumentChange(DocumentChangeEvent event) {
if (socket != null) {
// full path start with /, so substring
String fullPath = event.getDocument().getDocument().getFile().getPath().substring(1);
String project = fullPath.substring(0, fullPath.indexOf('/'));
String resource = fullPath.substring(fullPath.indexOf('/') + 1);
String text = JsonUtils.escapeValue(event.getText());
String json = "{"
+ "\"username\":\"USER\","
+ "\"project\":\"" + project + "\","
+ "\"resource\":\"" + resource + "\","
+ "\"offset\":" + event.getOffset() + ","
+ "\"removedCharCount\":" + event.getRemoveCharCount() + ","
+ "\"addedCharacters\": " + text
+ "}";
if (isUpdatingModel) {
return;
}
socket.emit("liveResourceChanged", JsonUtils.unsafeEval(json));
}
}
});
}
});
}
private void injectSocketIO() {
SocketIOResources ioresources = GWT.create(SocketIOResources.class);
ScriptInjector.fromString(ioresources.socketIo().getText()).setWindow(ScriptInjector.TOP_WINDOW).inject();
}
private boolean connectIfFluxMicroservice(MachineProcessDto descriptor) {
if (descriptor == null) {
return false;
}
if ("flux".equals(descriptor.getName())) {
String urlToSubstitute = "http://${server.port.3000/tcp}";
if (commandPropertyValueProviderRegistry == null) {
return false;
}
substituteAndConnect(urlToSubstitute);
return true;
}
return false;
}
int retryConnectToFlux = 5;
protected void connectToFlux(final String url) {
final SocketIOOverlay io = getSocketIO();
Log.info(getClass(), "connecting to " + url);
socket = io.connect(url);
socket.on("error", new Runnable() {
@Override
public void run() {
Log.info(getClass(), "error connecting to " + url);
}
});
socket.on("liveResourceChanged", new Consumer<FluxResourceChangedEventDataOverlay>() {
@Override
public void accept(FluxResourceChangedEventDataOverlay event) {
Document document = liveDocuments.get("/" + event.getProject() + "/" + event.getResource());
if (document == null) {
return;
}
String addedCharacters = event.getAddedCharacters();
isUpdatingModel = true;
TextPosition cursorPosition = document.getCursorPosition();
document.replace(event.getOffset(), event.getRemovedCharCount(), addedCharacters);
document.setCursorPosition(cursorPosition);
isUpdatingModel = false;
}
});
socket.emit("connectToChannel", JsonUtils.safeEval("{\"channel\" : \"USER\"}"));
}
public static native SocketIOOverlay getSocketIO()/*-{
return $wnd.io;
}-*/;
/**
 * Subscribes the handler to a message-bus channel, logging (not propagating)
 * any subscription failure.
 */
private void wsSubscribe(final String channel, final MessageHandler messageHandler) {
    try {
        messageBus.subscribe(channel, messageHandler);
    } catch (final WebSocketException error) {
        Log.error(getClass(), error);
    }
}
int trySubstitude = 10;
public void substituteAndConnect(final String commandLine) {
try {
String cmdLine = commandLine;
List<CommandPropertyValueProvider> providers = commandPropertyValueProviderRegistry.getProviders();
for (CommandPropertyValueProvider provider : providers) {
cmdLine = cmdLine.replace(provider.getKey(), provider.getValue());
}
connectToFlux(cmdLine);
return;
} catch (Exception e) {
Log.info(getClass(), e, "retrying " + trySubstitude
if (trySubstitude > 0) {
new Timer() {
@Override
public void run() {
substituteAndConnect(commandLine);
}
}.schedule(1000);
return;
}
throw e;
}
}
}
|
package org.jbpm.simulation.impl;
import java.util.ArrayList;
import java.util.List;
import org.jbpm.simulation.AggregatedSimulationEvent;
import org.jbpm.simulation.SimulationEvent;
import org.jbpm.simulation.impl.events.AggregatedActivitySimulationEvent;
import org.jbpm.simulation.impl.events.AggregatedProcessSimulationEvent;
import org.kie.api.io.Resource;
import org.kie.api.io.ResourceType;
import org.kie.api.runtime.KieSession;
import org.kie.internal.builder.KnowledgeBuilder;
import org.kie.internal.builder.KnowledgeBuilderFactory;
import org.kie.internal.io.ResourceFactory;
/**
 * Simulation repository that, in addition to in-memory storage, inserts every
 * stored event into a rule session so DRL rules can aggregate simulation data.
 */
public class WorkingMemorySimulationRepository extends InMemorySimulationRepository {

    // Rule session that events are inserted into; built from the supplied DRL rules.
    private KieSession ksession;

    // When true, rules fire synchronously on every stored event.
    private boolean fireRulesOnStore = false;

    public WorkingMemorySimulationRepository() {
    }

    public WorkingMemorySimulationRepository(String... rules) {
        this(false, rules);
    }

    public WorkingMemorySimulationRepository(Resource... rules) {
        this(false, rules);
    }

    /**
     * Builds a session from the given DRL resources and registers the
     * aggregation globals.
     *
     * @param fireRulesOnStore whether to fire rules on every stored event
     * @param rules DRL rule resources
     * @throws RuntimeException if the rules fail to compile
     */
    public WorkingMemorySimulationRepository(boolean fireRulesOnStore, Resource... rules) {
        this.fireRulesOnStore = fireRulesOnStore;
        this.ksession = buildSession(rules);
        try {
            // register globals for aggregated events
            ksession.setGlobal("simulation", new ArrayList<AggregatedActivitySimulationEvent>());
            ksession.setGlobal("summary", new ArrayList<AggregatedActivitySimulationEvent>());
            AggregatedProcessSimulationEvent init = new AggregatedProcessSimulationEvent("", 0, 0, 0);
            List<AggregatedSimulationEvent> processOnlyList = new ArrayList<AggregatedSimulationEvent>();
            processOnlyList.add(init);
            ksession.setGlobal("processEventsOnly", processOnlyList);
        } catch (Exception e) {
            // catch it as there could be no simulation global declared
        }
    }

    /**
     * Builds a session from DRL files found on the classpath.
     *
     * @param fireRulesOnStore whether to fire rules on every stored event
     * @param rules classpath locations of DRL files
     * @throws RuntimeException if the rules fail to compile
     */
    public WorkingMemorySimulationRepository(boolean fireRulesOnStore, String... rules) {
        this.fireRulesOnStore = fireRulesOnStore;
        Resource[] resources = new Resource[rules.length];
        for (int i = 0; i < rules.length; i++) {
            resources[i] = ResourceFactory.newClassPathResource(rules[i]);
        }
        this.ksession = buildSession(resources);
        try {
            // register global for aggregated events
            // NOTE(review): unlike the Resource-based constructor, only the
            // "simulation" global is registered here — confirm the asymmetry is intended.
            ksession.setGlobal("simulation", new ArrayList<AggregatedActivitySimulationEvent>());
        } catch (Exception e) {
            // catch it as there could be no simulation global declared
        }
    }

    // Compiles the DRL resources into a new session; shared by both rule-loading
    // constructors (previously duplicated inline).
    private static KieSession buildSession(Resource... rules) {
        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        for (Resource path : rules) {
            kbuilder.add(path, ResourceType.DRL);
        }
        if (kbuilder.hasErrors()) {
            throw new RuntimeException("Error while building knowledge base: " + kbuilder.getErrors());
        }
        return kbuilder.newKnowledgeBase().newKieSession();
    }

    @Override
    public void storeEvent(SimulationEvent event) {
        super.storeEvent(event);
        ksession.insert(event);
        if (fireRulesOnStore) {
            ksession.fireAllRules();
        }
    }

    public void fireAllRules() {
        ksession.fireAllRules();
    }

    public KieSession getSession() {
        return this.ksession;
    }

    /** Returns the contents of the "simulation" global populated by the rules. */
    @SuppressWarnings("unchecked")
    public List<AggregatedSimulationEvent> getAggregatedEvents() {
        return (List<AggregatedSimulationEvent>) this.ksession.getGlobal("simulation");
    }

    public Object getGlobal(String globalName) {
        return this.ksession.getGlobal(globalName);
    }

    @Override
    public void close() {
        super.close();
        this.ksession.dispose();
    }
}
|
package org.mariadb.jdbc.internal.protocol;
import org.mariadb.jdbc.HostAddress;
import org.mariadb.jdbc.MariaDbConnection;
import org.mariadb.jdbc.UrlParser;
import org.mariadb.jdbc.internal.MariaDbServerCapabilities;
import org.mariadb.jdbc.internal.MyX509TrustManager;
import org.mariadb.jdbc.internal.failover.FailoverProxy;
import org.mariadb.jdbc.internal.packet.send.*;
import org.mariadb.jdbc.internal.protocol.authentication.AuthenticationProviderHolder;
import org.mariadb.jdbc.internal.queryresults.ExecutionResult;
import org.mariadb.jdbc.internal.queryresults.SingleExecutionResult;
import org.mariadb.jdbc.internal.queryresults.resultset.MariaSelectResultSet;
import org.mariadb.jdbc.internal.util.*;
import org.mariadb.jdbc.internal.util.buffer.Buffer;
import org.mariadb.jdbc.internal.packet.read.ReadInitialConnectPacket;
import org.mariadb.jdbc.internal.packet.read.ReadPacketFetcher;
import org.mariadb.jdbc.internal.packet.read.Packet;
import org.mariadb.jdbc.internal.util.constant.HaMode;
import org.mariadb.jdbc.internal.util.constant.ParameterConstant;
import org.mariadb.jdbc.internal.util.constant.ServerStatus;
import org.mariadb.jdbc.internal.util.dao.QueryException;
import org.mariadb.jdbc.internal.packet.result.*;
import org.mariadb.jdbc.internal.stream.DecompressInputStream;
import org.mariadb.jdbc.internal.stream.PacketOutputStream;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocket;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.X509TrustManager;
import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.URL;
import java.nio.charset.Charset;
import java.security.KeyStore;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.locks.ReentrantLock;
public abstract class AbstractConnectProtocol implements Protocol {
    // Credentials resolved from the URL; never null (empty string when absent).
    private final String username;
    private final String password;
    // Set when the current host failed; read by shouldReconnectWithoutProxy().
    private boolean hostFailed;
    // Raw server version string and its parsed numeric components.
    private String version;
    private int majorVersion;
    private int minorVersion;
    private int patchVersion;
    // Server variables fetched at connect time (max_allowed_packet, time_zone, sql_mode, ...).
    private Map<String, String> serverData;
    // Calendar built from the server time zone; null if the zone could not be parsed.
    private Calendar cal;
    protected final ReentrantLock lock;
    protected final UrlParser urlParser;
    protected final Options options;
    protected Socket socket;
    protected PacketOutputStream writer;
    protected boolean readOnly = false;
    protected ReadPacketFetcher packetFetcher;
    protected HostAddress currentHost;
    protected FailoverProxy proxy;
    protected volatile boolean connected = false;
    protected boolean explicitClosed = false;
    protected String database;
    protected long serverThreadId;
    // Cache of prepared statements; only created when options.cachePrepStmts is set.
    protected PrepareStatementCache prepareStatementCache;
    protected boolean moreResults = false;
    public boolean moreResultsTypeBinary = false;
    public boolean hasWarnings = false;
    // The currently open streaming result set, if any; must be drained before new commands.
    public MariaSelectResultSet activeStreamingResult = null;
    public int dataTypeMappingFlags;
    public short serverStatus;

    /**
     * Get a protocol instance.
     *
     * @param urlParser connection URL infos
     * @param lock the lock for thread synchronisation
     */
    public AbstractConnectProtocol(final UrlParser urlParser, final ReentrantLock lock) {
        this.lock = lock;
        this.urlParser = urlParser;
        this.options = this.urlParser.getOptions();
        // Normalize null URL components to empty strings.
        this.database = (urlParser.getDatabase() == null ? "" : urlParser.getDatabase());
        this.username = (urlParser.getUsername() == null ? "" : urlParser.getUsername());
        this.password = (urlParser.getPassword() == null ? "" : urlParser.getPassword());
        if (options.cachePrepStmts) {
            prepareStatementCache = PrepareStatementCache.newInstance(options.prepStmtCacheSize, this);
        }
        setDataTypeMappingFlags();
    }

    /**
     * Skip packets not read that are not needed.
     * Packets are read according to needs.
     * If some data have not been read before next execution, skip it.
     *
     * @throws QueryException exception
     */
    public void skip() throws SQLException, QueryException {
        if (activeStreamingResult != null) {
            activeStreamingResult.close();
        }
        // Drain any pending multi-result responses from the stream.
        while (moreResults) {
            SingleExecutionResult execution = new SingleExecutionResult(null, 0, true, false);
            getMoreResults(execution);
        }
    }

    public abstract void getMoreResults(ExecutionResult executionResult) throws QueryException;

    // Records whether another result set is pending and whether it uses the binary protocol.
    public void setMoreResults(boolean moreResults, boolean isBinary) {
        this.moreResults = moreResults;
        this.moreResultsTypeBinary = isBinary;
    }

    /**
     * Closes socket and stream readers/writers Attempts graceful shutdown.
     */
    public void close() {
        if (lock != null) {
            lock.lock();
        }
        this.connected = false;
        try {
            /* If a streaming result set is open, close it.*/
            skip();
        } catch (Exception e) {
            /* eat exception */
        }
        try {
            if (options.cachePrepStmts) {
                prepareStatementCache.clear();
            }
            close(packetFetcher, writer, socket);
        } catch (Exception e) {
            // socket is closed, so it is ok to ignore exception
        } finally {
            if (lock != null) {
                lock.unlock();
            }
        }
    }

    /**
     * Sends a COM_QUIT packet, drains the socket, and closes all streams.
     * Any error while sending the close packet is swallowed; the socket is
     * always closed in the finally block.
     */
    protected static void close(ReadPacketFetcher fetcher, PacketOutputStream packetOutputStream, Socket socket) throws QueryException {
        SendClosePacket closePacket = new SendClosePacket();
        try {
            try {
                closePacket.send(packetOutputStream);
                socket.shutdownOutput();
                // Short timeout: just give the server a moment to finish the exchange.
                socket.setSoTimeout(3);
                InputStream is = socket.getInputStream();
                while (is.read() != -1) {
                }
            } catch (Throwable t) {
                //eat exception
            }
            packetOutputStream.close();
            fetcher.close();
        } catch (IOException e) {
            throw new QueryException("Could not close connection: " + e.getMessage(),
                    -1,
                    ExceptionMapper.SqlStates.CONNECTION_EXCEPTION.getSqlState(),
                    e);
        } finally {
            try {
                socket.close();
            } catch (IOException e) {
                //socket closed, if any error, so not throwing error
            }
        }
    }

    /**
     * Builds the SSL socket factory. Returns the JVM default factory when no
     * custom trust/keystore options are configured; otherwise creates a TLS
     * context with the configured trust manager and client certificates.
     */
    private SSLSocketFactory getSslSocketFactory() throws QueryException {
        if (!options.trustServerCertificate
                && options.serverSslCert == null
                && options.trustCertificateKeyStoreUrl == null
                && options.clientCertificateKeyStoreUrl == null) {
            return (SSLSocketFactory) SSLSocketFactory.getDefault();
        }
        try {
            SSLContext sslContext = SSLContext.getInstance("TLS");
            X509TrustManager[] trustManager = null;
            if (options.trustServerCertificate || options.serverSslCert != null
                    || options.trustCertificateKeyStoreUrl != null) {
                trustManager = new X509TrustManager[]{new MyX509TrustManager(options)};
            }
            KeyManager[] keyManager = null;
            String clientCertKeystoreUrl = options.clientCertificateKeyStoreUrl;
            if (clientCertKeystoreUrl != null && !clientCertKeystoreUrl.isEmpty()) {
                keyManager = loadClientCerts(clientCertKeystoreUrl, options.clientCertificateKeyStorePassword);
            }
            sslContext.init(keyManager, trustManager, null);
            return sslContext.getSocketFactory();
        } catch (Exception e) {
            throw new QueryException(e.getMessage(), 0, "HY000", e);
        }
    }

    /**
     * Loads client certificates from the given keystore URL.
     *
     * @param keystoreUrl URL of the keystore (any URL scheme openable by java.net.URL)
     * @param keystorePassword keystore password; may be null
     */
    private KeyManager[] loadClientCerts(String keystoreUrl, String keystorePassword) throws Exception {
        KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        InputStream inStream = null;
        try {
            char[] certKeystorePassword = keystorePassword == null ? null : keystorePassword.toCharArray();
            inStream = new URL(keystoreUrl).openStream();
            KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
            ks.load(inStream, certKeystorePassword);
            keyManagerFactory.init(ks, certKeystorePassword);
        } finally {
            if (inStream != null) {
                inStream.close();
            }
        }
        return keyManagerFactory.getKeyManagers();
    }

    /**
     * InitializeSocketOption.
     */
    private void initializeSocketOption() {
        try {
            // TCP_NODELAY is enabled either way; both branches set it to true
            // when options.tcpNoDelay is true or defaulting.
            if (options.tcpNoDelay) {
                socket.setTcpNoDelay(options.tcpNoDelay);
            } else {
                socket.setTcpNoDelay(true);
            }
            if (options.tcpKeepAlive) {
                socket.setKeepAlive(true);
            }
            if (options.tcpRcvBuf != null) {
                socket.setReceiveBufferSize(options.tcpRcvBuf);
            }
            if (options.tcpSndBuf != null) {
                socket.setSendBufferSize(options.tcpSndBuf);
            }
            if (options.tcpAbortiveClose) {
                // SO_LINGER 0 => RST on close instead of graceful FIN.
                socket.setSoLinger(true, 0);
            }
        } catch (Exception e) {
            // if (log.isDebugEnabled())log.debug("Failed to set socket option: " + e.getLocalizedMessage());
        }
    }

    /**
     * Connect to currentHost.
     *
     * @throws QueryException exception
     */
    public void connect() throws QueryException {
        if (!isClosed()) {
            close();
        }
        try {
            connect(currentHost.host, currentHost.port);
            return;
        } catch (IOException e) {
            throw new QueryException("Could not connect to " + currentHost + "." + e.getMessage(), -1,
                    ExceptionMapper.SqlStates.CONNECTION_EXCEPTION.getSqlState(), e);
        }
    }

    /**
     * Connect the client and perform handshake.
     *
     * @throws QueryException : handshake error, e.g wrong user or password
     * @throws IOException : connection error (host/port not available)
     */
    private void connect(String host, int port) throws QueryException, IOException {
        socket = Utils.createSocket(urlParser, host);
        initializeSocketOption();
        // Bind the socket to a particular interface if the connection property
        // localSocketAddress has been defined.
        if (options.localSocketAddress != null) {
            InetSocketAddress localAddress = new InetSocketAddress(options.localSocketAddress, 0);
            socket.bind(localAddress);
        }
        if (!socket.isConnected()) {
            InetSocketAddress sockAddr = new InetSocketAddress(host, port);
            if (options.connectTimeout != null) {
                socket.connect(sockAddr, options.connectTimeout);
            } else {
                socket.connect(sockAddr);
            }
        }
        // Extract socketTimeout URL parameter
        if (options.socketTimeout != null) {
            socket.setSoTimeout(options.socketTimeout);
        }
        handleConnectionPhases();
        if (options.useCompression) {
            writer.setUseCompression(true);
            packetFetcher = new ReadPacketFetcher(new DecompressInputStream(socket.getInputStream()));
        }
        connected = true;
        // Post-handshake setup: read server variables, set session options,
        // honor max_allowed_packet, then reset per-connection state flags.
        loadServerData();
        setSessionOptions();
        writer.setMaxAllowedPacket(Integer.parseInt(serverData.get("max_allowed_packet")));
        createDatabaseIfNotExist();
        loadCalendar();
        activeStreamingResult = null;
        moreResults = false;
        hasWarnings = false;
        hostFailed = false;
    }

    /**
     * Is the connection closed.
     *
     * @return true if the connection is closed
     */
    public boolean isClosed() {
        return !this.connected;
    }

    private void setSessionOptions() throws QueryException {
        // In JDBC, connection must start in autocommit mode
        // [CONJ-269] we cannot rely on serverStatus & ServerStatus.AUTOCOMMIT before this command to avoid this command.
        // if autocommit=0 is set on server configuration, DB always send Autocommit on serverStatus flag
        // after setting autocommit, we can rely on serverStatus value
        String sessionOption = "autocommit=1";
        if (options.jdbcCompliantTruncation) {
            // Ensure STRICT_TRANS_TABLES is present in sql_mode without dropping
            // any modes the server already has configured.
            if (serverData.get("sql_mode") == null || "".equals(serverData.get("sql_mode"))) {
                sessionOption += ",sql_mode='STRICT_TRANS_TABLES'";
            } else {
                if (!serverData.get("sql_mode").contains("STRICT_TRANS_TABLES")) {
                    sessionOption += ",sql_mode='" + serverData.get("sql_mode") + ",STRICT_TRANS_TABLES'";
                }
            }
        }
        if (options.sessionVariables != null) {
            sessionOption += "," + options.sessionVariables;
        }
        executeQuery("set session " + sessionOption);
    }

    /**
     * Reads the server greeting, optionally upgrades the stream to SSL, then
     * runs authentication. Stream readers/writers are re-created after the SSL
     * upgrade because the underlying socket changes.
     */
    private void handleConnectionPhases() throws QueryException {
        InputStream reader = null;
        try {
            reader = new BufferedInputStream(socket.getInputStream(), 16384);
            packetFetcher = new ReadPacketFetcher(reader);
            writer = new PacketOutputStream(socket.getOutputStream());
            final ReadInitialConnectPacket greetingPacket = new ReadInitialConnectPacket(packetFetcher);
            this.serverThreadId = greetingPacket.getServerThreadId();
            this.version = greetingPacket.getServerVersion();
            parseVersion();
            int clientCapabilities = initializeClientCapabilities();
            byte packetSeq = 1;
            if (options.useSsl && (greetingPacket.getServerCapabilities() & MariaDbServerCapabilities.SSL) != 0) {
                clientCapabilities |= MariaDbServerCapabilities.SSL;
                SendSslConnectionRequestPacket amcap = new SendSslConnectionRequestPacket(clientCapabilities);
                amcap.send(writer);
                SSLSocketFactory sslSocketFactory = getSslSocketFactory();
                SSLSocket sslSocket = (SSLSocket) sslSocketFactory.createSocket(socket,
                        socket.getInetAddress().getHostAddress(), socket.getPort(), true);
                sslSocket.setEnabledProtocols(new String[]{"TLSv1"});
                sslSocket.setUseClientMode(true);
                sslSocket.startHandshake();
                socket = sslSocket;
                writer = new PacketOutputStream(socket.getOutputStream());
                reader = new BufferedInputStream(socket.getInputStream(), 16384);
                packetFetcher = new ReadPacketFetcher(reader);
                // SSL request consumed one packet sequence number.
                packetSeq++;
            } else if (options.useSsl) {
                throw new QueryException("Trying to connect with ssl, but ssl not enabled in the server");
            }
            authentication(greetingPacket.getServerLanguage(), clientCapabilities, greetingPacket.getSeed(), packetSeq,
                    greetingPacket.getPluginName(), greetingPacket.getServerCapabilities());
        } catch (IOException e) {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ee) {
                    //eat exception
                }
            }
            throw new QueryException("Could not connect to " + currentHost.host + ":" + currentHost.port + ": " + e.getMessage(), -1,
                    ExceptionMapper.SqlStates.CONNECTION_EXCEPTION.getSqlState(), e);
        }
    }

    /**
     * Sends the handshake response and handles the server's reply: either an
     * OK packet, an error, or an authentication-switch request (0xFE), which is
     * delegated to the matching authentication plugin.
     */
    private void authentication(byte serverLanguage, int clientCapabilities, byte[] seed, byte packetSeq, String plugin, int serverCapabilities)
            throws QueryException, IOException {
        final SendHandshakeResponsePacket cap = new SendHandshakeResponsePacket(this.username,
                this.password,
                database,
                clientCapabilities,
                decideLanguage(serverLanguage),
                seed,
                packetSeq,
                plugin,
                options.connectionAttributes,
                serverThreadId);
        cap.send(writer);
        Buffer buffer = packetFetcher.getPacket();
        if ((buffer.getByteAt(0) & 0xFF) == 0xFE) {
            InterfaceAuthSwitchSendResponsePacket interfaceSendPacket;
            if ((serverCapabilities & MariaDbServerCapabilities.PLUGIN_AUTH) != 0) {
                //AuthSwitchRequest packet.
                buffer.readByte();
                plugin = buffer.readString(Charset.forName("ASCII"));
                byte[] authData = buffer.readRawBytes(buffer.remaining());
                //Authentication according to plugin.
                //see AuthenticationProviderHolder for implement other plugin
                interfaceSendPacket = AuthenticationProviderHolder.getAuthenticationProvider()
                        .processAuthPlugin(packetFetcher, plugin, password, authData, packetFetcher.getLastPacketSeq() + 1);
            } else {
                // Pre-plugin server: fall back to the old password scheme.
                interfaceSendPacket = new SendOldPasswordAuthPacket(this.password, Utils.copyWithLength(seed, 8),
                        packetFetcher.getLastPacketSeq() + 1);
            }
            interfaceSendPacket.send(writer);
            interfaceSendPacket.handleResultPacket(packetFetcher);
        } else {
            if (buffer.getByteAt(0) == Packet.ERROR) {
                ErrorPacket errorPacket = new ErrorPacket(buffer);
                throw new QueryException("Could not connect: " + errorPacket.getMessage(), errorPacket.getErrorNumber(), errorPacket.getSqlState());
            }
            serverStatus = new OkPacket(buffer).getServerStatus();
        }
    }

    // Computes the client capability bitmask advertised in the handshake,
    // reflecting the configured connection options.
    private int initializeClientCapabilities() {
        int capabilities =
                MariaDbServerCapabilities.LONG_PASSWORD
                        | MariaDbServerCapabilities.IGNORE_SPACE
                        | MariaDbServerCapabilities.CLIENT_PROTOCOL_41
                        | MariaDbServerCapabilities.TRANSACTIONS
                        | MariaDbServerCapabilities.SECURE_CONNECTION
                        | MariaDbServerCapabilities.LOCAL_FILES
                        | MariaDbServerCapabilities.MULTI_RESULTS
                        | MariaDbServerCapabilities.PS_MULTI_RESULTS
                        | MariaDbServerCapabilities.FOUND_ROWS
                        | MariaDbServerCapabilities.PLUGIN_AUTH
                        | MariaDbServerCapabilities.CONNECT_ATTRS
                        | MariaDbServerCapabilities.PLUGIN_AUTH_LENENC_CLIENT_DATA;
        if (options.allowMultiQueries || (options.rewriteBatchedStatements)) {
            capabilities |= MariaDbServerCapabilities.MULTI_STATEMENTS;
        }
        if (options.useCompression) {
            capabilities |= MariaDbServerCapabilities.COMPRESS;
        }
        if (options.interactiveClient) {
            capabilities |= MariaDbServerCapabilities.CLIENT_INTERACTIVE;
        }
        // If a database is given, but createDatabaseIfNotExist is not defined or is false,
        // then just try to connect to the given database
        if (database != null && !options.createDatabaseIfNotExist) {
            capabilities |= MariaDbServerCapabilities.CONNECT_WITH_DB;
        }
        return capabilities;
    }

    /**
     * If createDB is true, then just try to create the database and to use it.
     * @throws QueryException if connection failed
     */
    private void createDatabaseIfNotExist() throws QueryException {
        if (checkIfMaster() && options.createDatabaseIfNotExist) {
            // Try to create the database if it does not exist
            String quotedDb = MariaDbConnection.quoteIdentifier(this.database);
            executeQuery("CREATE DATABASE IF NOT EXISTS " + quotedDb);
            executeQuery("USE " + quotedDb);
        }
    }

    /**
     * Resolves the server time zone (explicit option, then server variables)
     * into a Calendar. When the zone cannot be parsed, cal is left null — and
     * if legacy datetime code is disabled, the connection fails with a
     * descriptive error instead.
     */
    private void loadCalendar() throws QueryException {
        String timeZone = null;
        if (options.serverTimezone != null) {
            timeZone = options.serverTimezone;
        }
        if (timeZone == null) {
            timeZone = getServerData("time_zone");
            if ("SYSTEM".equals(timeZone)) {
                timeZone = getServerData("system_time_zone");
            }
        }
        //handle custom timezone id
        if (timeZone != null && timeZone.length() >= 2
                && (timeZone.startsWith("+") || timeZone.startsWith("-"))
                && Character.isDigit(timeZone.charAt(1))) {
            timeZone = "GMT" + timeZone;
        }
        try {
            TimeZone tz = Utils.getTimeZone(timeZone);
            cal = Calendar.getInstance(tz);
        } catch (SQLException e) {
            cal = null;
            if (!options.useLegacyDatetimeCode) {
                if (options.serverTimezone != null) {
                    throw new QueryException("The server time_zone '" + timeZone + "' defined in the 'serverTimezone' parameter cannot be parsed "
                            + "by java TimeZone implementation. See java.util.TimeZone#getAvailableIDs() for available TimeZone, depending on your "
                            + "JRE implementation.", 0, "01S00");
                } else {
                    throw new QueryException("The server time_zone '" + timeZone + "' cannot be parsed. The server time zone must defined in the "
                            + "jdbc url string with the 'serverTimezone' parameter (or server time zone must be defined explicitly). See "
                            + "java.util.TimeZone#getAvailableIDs() for available TimeZone, depending on your JRE implementation.", 0, "01S00");
                }
            }
        }
    }

    // Fetches the server variables this driver needs and caches them in serverData.
    private void loadServerData() throws QueryException, IOException {
        serverData = new TreeMap<>();
        SingleExecutionResult qr = new SingleExecutionResult(null, 0, true, false);
        try {
            executeQuery(qr, "SHOW VARIABLES WHERE Variable_name in ("
                    + "'max_allowed_packet', "
                    + "'system_time_zone', "
                    + "'time_zone', "
                    + "'sql_mode'"
                    + ")", ResultSet.TYPE_FORWARD_ONLY);
            MariaSelectResultSet resultSet = qr.getResult();
            while (resultSet.next()) {
                serverData.put(resultSet.getString(1), resultSet.getString(2));
            }
        } catch (SQLException sqle) {
            throw new QueryException("could not load system variables", -1, ExceptionMapper.SqlStates.CONNECTION_EXCEPTION.getSqlState(), sqle);
        }
    }

    public String getServerData(String code) {
        return serverData.get(code);
    }

    public boolean checkIfMaster() throws QueryException {
        return isMasterConnection();
    }

    // Collation ids for the utf8mb4_* character sets; anything else is forced to utf8.
    private boolean isServerLanguageUtf8mb4(byte serverLanguage) {
        Byte[] utf8mb4Languages = {
                (byte) 45, (byte) 46, (byte) 224, (byte) 225, (byte) 226, (byte) 227, (byte) 228,
                (byte) 229, (byte) 230, (byte) 231, (byte) 232, (byte) 233, (byte) 234, (byte) 235,
                (byte) 236, (byte) 237, (byte) 238, (byte) 239, (byte) 240, (byte) 241, (byte) 242,
                (byte) 243, (byte) 245
        };
        return Arrays.asList(utf8mb4Languages).contains(serverLanguage);
    }

    private byte decideLanguage(byte serverLanguage) {
        //force UTF8mb4 if possible, UTF8 if not.
        byte result = (isServerLanguageUtf8mb4(serverLanguage) ? serverLanguage : 33);
        return result;
    }

    /**
     * Check that next read packet is a End-of-file packet.
     * @throws QueryException if not a End-of-file packet
     * @throws IOException if connection error occur
     */
    public void readEofPacket() throws QueryException, IOException {
        Buffer buffer = packetFetcher.getReusableBuffer();
        switch (buffer.getByteAt(0)) {
            case (byte) 0xfe: //EOF
                EndOfFilePacket eof = new EndOfFilePacket(buffer);
                this.hasWarnings = eof.getWarningCount() > 0;
                this.serverStatus = eof.getStatusFlags();
                break;
            case (byte) 0xff: //ERROR
                ErrorPacket ep = new ErrorPacket(buffer);
                throw new QueryException("Could not connect: " + ep.getMessage(), ep.getErrorNumber(), ep.getSqlState());
            default:
                throw new QueryException("Unexpected stream type " + buffer.getByteAt(0)
                        + " instead of EOF");
        }
    }

    // Marks the current host as failed and closes the connection (used when no
    // failover proxy is managing this protocol).
    public void setHostFailedWithoutProxy() {
        hostFailed = true;
        close();
    }

    public UrlParser getUrlParser() {
        return urlParser;
    }

    public boolean isMasterConnection() {
        return ParameterConstant.TYPE_MASTER.equals(currentHost.type);
    }

    public boolean mustBeMasterConnection() {
        return true;
    }

    // True when the server runs with NO_BACKSLASH_ESCAPES sql_mode.
    public boolean noBackslashEscapes() {
        return ((serverStatus & ServerStatus.NO_BACKSLASH_ESCAPES) != 0);
    }

    /**
     * Connect without proxy. (use basic failover implementation)
     *
     * @throws QueryException exception
     */
    public void connectWithoutProxy() throws QueryException {
        if (!isClosed()) {
            close();
        }
        Random rand = new Random();
        List<HostAddress> addrs = urlParser.getHostAddresses();
        List<HostAddress> hosts = new LinkedList<>(addrs);
        // There could be several addresses given in the URL spec, try all of them, and throw exception if all hosts
        // fail.
        while (!hosts.isEmpty()) {
            // LOADBALANCE picks a random host; other modes try hosts in order.
            if (urlParser.getHaMode().equals(HaMode.LOADBALANCE)) {
                currentHost = hosts.get(rand.nextInt(hosts.size()));
            } else {
                currentHost = hosts.get(0);
            }
            hosts.remove(currentHost);
            try {
                connect(currentHost.host, currentHost.port);
                return;
            } catch (IOException e) {
                if (hosts.isEmpty()) {
                    throw new QueryException("Could not connect to " + HostAddress.toString(addrs)
                            + " : " + e.getMessage(), -1, ExceptionMapper.SqlStates.CONNECTION_EXCEPTION.getSqlState(), e);
                }
            }
        }
    }

    // Reconnect only when not inside a transaction, the host actually failed,
    // and autoReconnect is enabled.
    public boolean shouldReconnectWithoutProxy() {
        return (!((serverStatus & ServerStatus.IN_TRANSACTION) != 0) && hostFailed && urlParser.getOptions().autoReconnect);
    }

    public String getServerVersion() {
        return version;
    }

    public boolean getReadonly() {
        return readOnly;
    }

    public void setReadonly(final boolean readOnly) {
        this.readOnly = readOnly;
    }

    public HostAddress getHostAddress() {
        return currentHost;
    }

    // Switching host also re-derives the read-only flag from the host type.
    public void setHostAddress(HostAddress host) {
        this.currentHost = host;
        this.readOnly = ParameterConstant.TYPE_SLAVE.equals(this.currentHost.type);
    }

    public String getHost() {
        return currentHost.host;
    }

    public FailoverProxy getProxy() {
        return proxy;
    }

    public void setProxy(FailoverProxy proxy) {
        this.proxy = proxy;
    }

    public int getPort() {
        return currentHost.port;
    }

    public String getDatabase() {
        return database;
    }

    public String getUsername() {
        return username;
    }

    public String getPassword() {
        return password;
    }

    // Splits the version string on any non-digit run; missing components stay 0.
    private void parseVersion() {
        String[] versionArray = version.split("[^0-9]");
        if (versionArray.length > 0) {
            majorVersion = Integer.parseInt(versionArray[0]);
        }
        if (versionArray.length > 1) {
            minorVersion = Integer.parseInt(versionArray[1]);
        }
        if (versionArray.length > 2) {
            patchVersion = Integer.parseInt(versionArray[2]);
        }
    }

    public int getMajorServerVersion() {
        return majorVersion;
    }

    public int getMinorServerVersion() {
        return minorVersion;
    }

    /**
     * Utility method to check if database version is greater than parameters.
     * @param major major version
     * @param minor minor version
     * @param patch patch version
     * @return true if version is greater than parameters
     */
    public boolean versionGreaterOrEqual(int major, int minor, int patch) {
        if (this.majorVersion > major) {
            return true;
        }
        if (this.majorVersion < major) {
            return false;
        }
        /*
         * Major versions are equal, compare minor versions
         */
        if (this.minorVersion > minor) {
            return true;
        }
        if (this.minorVersion < minor) {
            return false;
        }
        //Minor versions are equal, compare patch version.
        if (this.patchVersion > patch) {
            return true;
        }
        if (this.patchVersion < patch) {
            return false;
        }
        // Patch versions are equal => versions are equal.
        return true;
    }

    public boolean getPinGlobalTxToPhysicalConnection() {
        return this.options.pinGlobalTxToPhysicalConnection;
    }

    /**
     * Has warnings.
     *
     * @return true if as warnings.
     */
    public boolean hasWarnings() {
        lock.lock();
        try {
            return hasWarnings;
        } finally {
            lock.unlock();
        }
    }

    /**
     * Is connected.
     *
     * @return true if connected
     */
    public boolean isConnected() {
        lock.lock();
        try {
            return connected;
        } finally {
            lock.unlock();
        }
    }

    // Translates driver options into result-set type-mapping flags.
    private void setDataTypeMappingFlags() {
        dataTypeMappingFlags = 0;
        if (options.tinyInt1isBit) {
            dataTypeMappingFlags |= MariaSelectResultSet.TINYINT1_IS_BIT;
        }
        if (options.yearIsDateType) {
            dataTypeMappingFlags |= MariaSelectResultSet.YEAR_IS_DATE_TYPE;
        }
    }

    public long getServerThreadId() {
        return serverThreadId;
    }

    public int getDataTypeMappingFlags() {
        return dataTypeMappingFlags;
    }

    public boolean isExplicitClosed() {
        return explicitClosed;
    }

    public Calendar getCalendar() {
        return cal;
    }

    public Options getOptions() {
        return options;
    }

    public void setHasWarnings(boolean hasWarnings) {
        this.hasWarnings = hasWarnings;
    }

    public MariaSelectResultSet getActiveStreamingResult() {
        return activeStreamingResult;
    }

    public void setActiveStreamingResult(MariaSelectResultSet activeStreamingResult) {
        this.activeStreamingResult = activeStreamingResult;
    }

    @Override
    public ReentrantLock getLock() {
        return lock;
    }

    @Override
    public boolean hasMoreResults() {
        return moreResults;
    }

    public PrepareStatementCache getPrepareStatementCache() {
        return prepareStatementCache;
    }

    public abstract void executeQuery(final String sql) throws QueryException;
}
|
package org.mbari.m3.vars.annotation.commands;
import org.mbari.m3.vars.annotation.UIToolBox;
import org.mbari.m3.vars.annotation.events.AnnotationsChangedEvent;
import org.mbari.m3.vars.annotation.model.Annotation;
import org.mbari.m3.vars.annotation.model.User;
import org.mbari.m3.vars.annotation.services.ConceptService;
import org.mbari.m3.vars.annotation.ui.AnnotationServiceDecorator;
import org.mbari.vcr4j.util.Preconditions;
import java.time.Instant;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @author Brian Schlining
* @since 2017-09-17T16:24:00
*/
public abstract class UpdateAnnotationsCmd implements Command {

    // Snapshot used by unapply() to restore the pre-update state.
    protected List<Annotation> originalAnnotations;
    // Annotations carrying the new values to persist.
    protected List<Annotation> changedAnnotations;
    // When true, each annotation's concept is resolved to its primary name
    // before updating. Cleared after the first successful resolution.
    private volatile boolean checkConceptName;
    // When true, apply() stamps the current user as the observer.
    private boolean updateUser = false;

    public UpdateAnnotationsCmd(List<Annotation> originalAnnotations,
                                List<Annotation> changedAnnotations) {
        this(originalAnnotations, changedAnnotations, false);
    }

    /**
     * @param originalAnnotations The unchanged annotations
     * @param changedAnnotations  The annotations carrying the modified values
     * @param checkConceptName    Resolve concepts to their primary names before updating
     */
    public UpdateAnnotationsCmd(List<Annotation> originalAnnotations,
                                List<Annotation> changedAnnotations,
                                boolean checkConceptName) {
        this(originalAnnotations, changedAnnotations, checkConceptName, false);
    }

    /**
     * @param originalAnnotations The unchanged annotations
     * @param changedAnnotations  The annotations carrying the modified values
     * @param checkConceptName    Resolve concepts to their primary names before updating
     * @param updateUser          Stamp the current user as observer on apply
     * @throws IllegalArgumentException if either list is null or the sizes differ
     */
    public UpdateAnnotationsCmd(List<Annotation> originalAnnotations,
                                List<Annotation> changedAnnotations,
                                boolean checkConceptName,
                                boolean updateUser) {
        Preconditions.checkArgument(originalAnnotations != null,
                "Original annotations can not be null");
        Preconditions.checkArgument(changedAnnotations != null,
                "Changed annotations can not be null");
        Preconditions.checkArgument(originalAnnotations.size() == changedAnnotations.size(),
                "The Original annotations and the changed annotations are not the same size");
        this.originalAnnotations = originalAnnotations;
        this.changedAnnotations = changedAnnotations;
        // Stamp every changed annotation with one shared observation time.
        final Instant now = Instant.now();
        changedAnnotations.forEach(a -> a.setObservationTimestamp(now));
        this.checkConceptName = checkConceptName;
        this.updateUser = updateUser;
    }

    @Override
    public void apply(UIToolBox toolBox) {
        // Don't change the user when only the activity is changed.
        if (updateUser) {
            final User user = toolBox.getData().getUser();
            if (user != null) {
                changedAnnotations.forEach(a -> a.setObserver(user.getUsername()));
            }
        }
        if (checkConceptName) {
            // Resolve each concept to its primary name, then run the update once all
            // lookups complete. If any lookup fails, allOf completes exceptionally
            // and the update is silently skipped (unchanged pre-existing behavior).
            ConceptService conceptService = toolBox.getServices().getConceptService();
            // FIX: use a wildcard array type and an array-constructor reference
            // instead of a raw CompletableFuture[] built via `i -> new CompletableFuture[i]`.
            CompletableFuture<?>[] futures = changedAnnotations.stream()
                    .map(a -> conceptService.findConcept(a.getConcept())
                            .thenAccept(opt ->
                                    opt.ifPresent(c -> a.setConcept(c.getName()))))
                    .toArray(CompletableFuture[]::new);
            CompletableFuture.allOf(futures)
                    .thenAccept(v -> {
                        doUpdate(toolBox, changedAnnotations);
                        // Names are resolved now; a redo can skip the lookup.
                        checkConceptName = false;
                    });
        }
        else {
            doUpdate(toolBox, changedAnnotations);
        }
    }

    @Override
    public void unapply(UIToolBox toolBox) {
        // Restore the snapshot taken at construction time.
        doUpdate(toolBox, originalAnnotations);
    }

    // Persists the given annotations, then refreshes the UI rows identified by
    // the original observation UUIDs.
    private void doUpdate(UIToolBox toolBox, List<Annotation> annotations) {
        toolBox.getServices()
                .getAnnotationService()
                .updateAnnotations(annotations)
                .thenAccept(as -> {
                    AnnotationServiceDecorator asd = new AnnotationServiceDecorator(toolBox);
                    Set<UUID> uuids = originalAnnotations.stream()
                            .map(Annotation::getObservationUuid)
                            .collect(Collectors.toSet());
                    asd.refreshAnnotationsView(uuids);
                });
    }
}
|
package org.neo4j.kernel.impl.transaction.xaframework;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.transaction.xa.XAException;
import javax.transaction.xa.Xid;
import org.neo4j.kernel.impl.transaction.TransactionFailureException;
import org.neo4j.kernel.impl.transaction.XidImpl;
import org.neo4j.kernel.impl.util.ArrayMap;
import org.neo4j.kernel.impl.util.FileUtils;
/**
 * <CODE>XaLogicalLog</CODE> is a transaction and logical log combined. In
 * this log information about the transaction (such as started, prepared and
 * committed) will be written. All commands participating in the transaction
 * will also be written to the log.
 * <p>
 * Normally you don't have to do anything with this log except open it after it
 * has been instantiated (see {@link XaContainer}). The only method that may be
 * of use when implementing a XA compatible resource is the
 * {@link #getCurrentTxIdentifier}. Leave everything else be unless you know
 * what you're doing.
 * <p>
 * When the log is opened it will be scanned for uncompleted transactions and
 * those transactions will be re-created. When the scan of the log is complete
 * all transactions that haven't entered the prepared state will be marked as
 * done (implies rolled back) and dropped. All transactions that have been
 * prepared will be held in memory until the transaction manager tells them to
 * commit. Transactions that already started commit but didn't get flagged as
 * done will be re-committed.
 */
public class XaLogicalLog
{
// Per-instance logger, named after this class plus the log file name.
private Logger log;
// empty record due to memory mapped file
private static final byte EMPTY = (byte) 0;
// tx has started
private static final byte TX_START = (byte) 1;
// tx has been prepared
private static final byte TX_PREPARE = (byte) 2;
// a XaCommand in a transaction
private static final byte COMMAND = (byte) 3;
// done, either a read only tx or rolledback/forget
private static final byte DONE = (byte) 4;
// tx one-phase commit
private static final byte TX_1P_COMMIT = (byte) 5;
// tx two-phase commit
private static final byte TX_2P_COMMIT = (byte) 6;
// Marker characters written to the ".active" file: clean, log 1, log 2.
private static final char CLEAN = 'C';
private static final char LOG1 = '1';
private static final char LOG2 = '2';
// Channel of the currently active physical log file.
private FileChannel fileChannel = null;
// Scratch buffer used for reading/writing individual log entries.
private final ByteBuffer buffer;
// Buffered writer over fileChannel (memory mapped or direct).
private LogBuffer writeBuffer = null;
private long logVersion = 0;
// Maps tx identifier -> start entry (xid + file position of TX_START).
private ArrayMap<Integer,StartEntry> xidIdentMap =
    new ArrayMap<Integer,StartEntry>( 4, false, true );
// Transactions re-created during recovery, keyed by identifier.
private Map<Integer,XaTransaction> recoveredTxMap =
    new HashMap<Integer,XaTransaction>();
private int nextIdentifier = 1;
private boolean scanIsComplete = false;
private String fileName = null;
private final XaResourceManager xaRm;
private final XaCommandFactory cf;
private final XaTransactionFactory xaTf;
// Which physical log is active (CLEAN, LOG1 or LOG2).
private char currentLog = CLEAN;
// If true, rotated logs are archived as .v<version> instead of deleted.
private boolean keepLogs = false;
private boolean autoRotate = true;
private long rotateAtSize = 10*1024*1024; // 10MB
// Backup slaves only apply logs shipped via applyLog(); no local tx starts.
private boolean backupSlave = false;
private boolean useMemoryMapped = true;
/**
 * Creates a logical log bound to the given base file name and
 * collaborators. The log is not usable until {@link #open()} is called.
 */
XaLogicalLog( String fileName, XaResourceManager xaRm, XaCommandFactory cf,
    XaTransactionFactory xaTf, Map<Object,Object> config )
{
    this.fileName = fileName;
    this.xaRm = xaRm;
    this.cf = cf;
    this.xaTf = xaTf;
    this.useMemoryMapped = getMemoryMapped( config );
    log = Logger.getLogger( this.getClass().getName() + "/" + fileName );
    // Sized to hold any single entry header including a maximum size Xid.
    buffer = ByteBuffer.allocateDirect( 9 + Xid.MAXGTRIDSIZE
        + Xid.MAXBQUALSIZE * 10 );
}
/**
 * Reads the "use_memory_mapped_buffers" setting from the config map.
 *
 * @param config configuration map, may be null
 * @return false only if the setting is present and equals "false"
 *         (case-insensitively); true otherwise
 */
private boolean getMemoryMapped( Map<Object,Object> config )
{
    if ( config != null )
    {
        String value = (String) config.get( "use_memory_mapped_buffers" );
        // equalsIgnoreCase avoids the locale-sensitive toLowerCase()
        // (e.g. the Turkish dotless-i problem) while keeping the same
        // "only the literal word false disables it" semantics.
        if ( value != null && value.equalsIgnoreCase( "false" ) )
        {
            return false;
        }
    }
    return true;
}
/**
 * Opens the logical log. The ".active" marker file decides which
 * physical file (.1, .2 or the legacy un-rotated file) to use; the
 * chosen file is then opened (running recovery if it has content) and
 * wrapped in the configured write buffer.
 *
 * @throws IOException on I/O failure
 * @throws IllegalStateException if the marker and the files on disk
 *         disagree
 */
synchronized void open() throws IOException
{
    String activeFileName = fileName + ".active";
    if ( !new File( activeFileName ).exists() )
    {
        if ( new File( fileName ).exists() )
        {
            // old < b8 xaframework with no log rotation and we need to
            // do recovery on it
            open( fileName );
        }
        else
        {
            open( fileName + ".1" );
            setActiveLog( LOG1 );
        }
    }
    else
    {
        // Read the active-log marker character (stored as 4 bytes).
        FileChannel fc = new RandomAccessFile( activeFileName ,
            "rw" ).getChannel();
        byte bytes[] = new byte[256];
        ByteBuffer buf = ByteBuffer.wrap( bytes );
        int read = fc.read( buf );
        fc.close();
        if ( read != 4 )
        {
            throw new IllegalStateException( "Read " + read +
                " bytes from " + activeFileName + " but expected 4" );
        }
        buf.flip();
        char c = buf.asCharBuffer().get();
        // A leftover ".copy" file from an interrupted rotation is stale.
        File copy = new File( fileName + ".copy" );
        if ( copy.exists() )
        {
            if ( !copy.delete() )
            {
                log.warning( "Unable to delete " + copy.getName() );
            }
        }
        if ( c == CLEAN )
        {
            // clean
            String newLog = fileName + ".1";
            if ( new File( newLog ).exists() )
            {
                throw new IllegalStateException(
                    "Active marked as clean but log " + newLog + " exist" );
            }
            open( newLog );
            setActiveLog( LOG1 );
        }
        else if ( c == LOG1 )
        {
            String newLog = fileName + ".1";
            if ( !new File( newLog ).exists() )
            {
                throw new IllegalStateException(
                    "Active marked as 1 but no " + newLog + " exist" );
            }
            currentLog = LOG1;
            File otherLog = new File( fileName + ".2" );
            if ( otherLog.exists() )
            {
                if ( !otherLog.delete() )
                {
                    // Fixed: warn about the file we actually failed to
                    // delete (was copy.getName(), a copy-paste error).
                    log.warning( "Unable to delete " + otherLog.getName() );
                }
            }
            open( newLog );
        }
        else if ( c == LOG2 )
        {
            String newLog = fileName + ".2";
            if ( !new File( newLog ).exists() )
            {
                throw new IllegalStateException(
                    "Active marked as 2 but no " + newLog + " exist" );
            }
            File otherLog = new File( fileName + ".1" );
            if ( otherLog.exists() )
            {
                if ( !otherLog.delete() )
                {
                    // Fixed: warn about the file we actually failed to
                    // delete (was copy.getName(), a copy-paste error).
                    log.warning( "Unable to delete " + otherLog.getName() );
                }
            }
            currentLog = LOG2;
            open( newLog );
        }
        else
        {
            throw new IllegalStateException( "Unknown active log: " + c );
        }
    }
    // Wrap the channel in the configured write buffer implementation.
    if ( !useMemoryMapped )
    {
        writeBuffer = new DirectMappedLogBuffer( fileChannel );
    }
    else
    {
        writeBuffer = new MemoryMappedLogBuffer( fileChannel );
    }
}
/**
 * Opens the given physical log file. A non-empty file triggers internal
 * recovery; an empty file gets the current version written as its
 * 8-byte header.
 */
private void open( String fileToOpen ) throws IOException
{
    fileChannel = new RandomAccessFile( fileToOpen, "rw" ).getChannel();
    if ( fileChannel.size() != 0 )
    {
        doInternalRecovery( fileToOpen );
    }
    else
    {
        logVersion = xaTf.getCurrentVersion();
        buffer.clear();
        buffer.putLong( logVersion );
        buffer.flip();
        fileChannel.write( buffer );
        scanIsComplete = true;
    }
}
/**
 * @return true once the recovery scan has finished (also true for a
 *         freshly created empty log).
 */
public boolean scanIsComplete()
{
    return scanIsComplete;
}
// Hands out the next transaction identifier, wrapping back to 1 on int
// overflow so identifiers are always positive.
private int getNextIdentifier()
{
    if ( ++nextIdentifier < 0 )
    {
        nextIdentifier = 1;
    }
    return nextIdentifier;
}
// returns identifier for transaction
// [TX_START][xid[gid.length,bid.lengh,gid,bid]][identifier][format id]
public synchronized int start( Xid xid ) throws XAException
{
    // Backup slaves only apply shipped logs; local tx starts are refused.
    if ( backupSlave )
    {
        throw new XAException( "Resource is configured as backup slave, " +
            "no new transactions can be started for " + fileName + "." +
            currentLog );
    }
    int xidIdent = getNextIdentifier();
    try
    {
        byte globalId[] = xid.getGlobalTransactionId();
        byte branchId[] = xid.getBranchQualifier();
        int formatId = xid.getFormatId();
        // Remember where this TX_START record begins so that rotation can
        // copy still-active transactions into the new log.
        long position = writeBuffer.getFileChannelPosition();
        writeBuffer.put( TX_START ).put( (byte) globalId.length ).put(
            (byte) branchId.length ).put( globalId ).put( branchId )
            .putInt( xidIdent ).putInt( formatId );
        xidIdentMap.put( xidIdent, new StartEntry( xid, position ) );
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log couldn't start transaction: "
            + e );
    }
    return xidIdent;
}
// Recovery: reads a TX_START entry at the current file position and
// re-creates the transaction it begins. Returns false on a truncated
// entry (the caller stops scanning there).
private boolean readTxStartEntry() throws IOException
{
    // position of this entry's payload (the TX_START type byte was
    // already consumed by readEntry)
    long position = fileChannel.position();
    // get the global id
    buffer.clear();
    buffer.limit( 1 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    byte globalIdLength = buffer.get();
    // get the branchId id
    buffer.clear();
    buffer.limit( 1 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    byte branchIdLength = buffer.get();
    byte globalId[] = new byte[globalIdLength];
    ByteBuffer tmpBuffer = ByteBuffer.wrap( globalId );
    if ( fileChannel.read( tmpBuffer ) != globalId.length )
    {
        return false;
    }
    byte branchId[] = new byte[branchIdLength];
    tmpBuffer = ByteBuffer.wrap( branchId );
    if ( fileChannel.read( tmpBuffer ) != branchId.length )
    {
        return false;
    }
    // get the tx identifier
    buffer.clear();
    buffer.limit( 4 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    int identifier = buffer.getInt();
    // keep handing out identifiers above any identifier seen in the log
    if ( identifier >= nextIdentifier )
    {
        nextIdentifier = (identifier + 1);
    }
    // get the format id
    buffer.clear();
    buffer.limit( 4 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    int formatId = buffer.getInt();
    // re-create the transaction
    Xid xid = new XidImpl( globalId, branchId, formatId );
    xidIdentMap.put( identifier, new StartEntry( xid, position ) );
    XaTransaction xaTx = xaTf.create( identifier );
    xaTx.setRecovered();
    recoveredTxMap.put( identifier, xaTx );
    xaRm.injectStart( xid, xaTx );
    return true;
}
// [TX_PREPARE][identifier]
public synchronized void prepare( int identifier ) throws XAException
{
    assert xidIdentMap.get( identifier ) != null;
    try
    {
        writeBuffer.put( TX_PREPARE ).putInt( identifier );
        // The prepare record must be durable before answering the tx
        // manager.
        writeBuffer.force();
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log unable to mark prepare ["
            + identifier + "] " + e );
    }
}
// Recovery: reads a TX_PREPARE entry and marks the corresponding
// recovered transaction as prepared. Returns false on a truncated entry
// or an unknown tx identifier.
private boolean readTxPrepareEntry() throws IOException
{
    // get the tx identifier
    buffer.clear();
    buffer.limit( 4 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    int identifier = buffer.getInt();
    StartEntry entry = xidIdentMap.get( identifier );
    if ( entry == null )
    {
        return false;
    }
    Xid xid = entry.getXid();
    if ( xaRm.injectPrepare( xid ) )
    {
        // read only we can remove
        xidIdentMap.remove( identifier );
        recoveredTxMap.remove( identifier );
    }
    return true;
}
// [TX_1P_COMMIT][identifier]
public synchronized void commitOnePhase( int identifier )
    throws XAException
{
    assert xidIdentMap.get( identifier ) != null;
    try
    {
        writeBuffer.put( TX_1P_COMMIT ).putInt( identifier );
        // The commit record must be durable before the commit proceeds.
        writeBuffer.force();
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log unable to mark 1P-commit ["
            + identifier + "] " + e );
    }
}
/**
 * Recovery: reads a TX_1P_COMMIT entry and re-injects the one-phase
 * commit into the resource manager.
 *
 * @return false if the entry is truncated or references an unknown tx
 */
private boolean readTxOnePhaseCommit() throws IOException
{
    // get the tx identifier
    buffer.clear();
    buffer.limit( 4 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    int identifier = buffer.getInt();
    StartEntry entry = xidIdentMap.get( identifier );
    if ( entry == null )
    {
        return false;
    }
    Xid xid = entry.getXid();
    try
    {
        xaRm.injectOnePhaseCommit( xid );
    }
    catch ( XAException e )
    {
        e.printStackTrace();
        // Preserve the original failure as the cause instead of
        // discarding everything but its message.
        IOException ioe = new IOException( e.getMessage() );
        ioe.initCause( e );
        throw ioe;
    }
    return true;
}
// [DONE][identifier]
public synchronized void done( int identifier ) throws XAException
{
    // Backup slaves never write locally.
    if ( backupSlave )
    {
        return;
    }
    assert xidIdentMap.get( identifier ) != null;
    try
    {
        // Note: not forced; per the class javadoc, a committed tx whose
        // DONE record is lost is simply re-committed on recovery.
        writeBuffer.put( DONE ).putInt( identifier );
        xidIdentMap.remove( identifier );
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log unable to mark as done ["
            + identifier + "] " + e );
    }
}
// [DONE][identifier] called from XaResourceManager during internal recovery
synchronized void doneInternal( int identifier ) throws IOException
{
    // Writes directly through fileChannel; writeBuffer is not yet set up
    // while recovery is running (open() assigns it afterwards).
    buffer.clear();
    buffer.put( DONE ).putInt( identifier );
    buffer.flip();
    fileChannel.write( buffer );
    xidIdentMap.remove( identifier );
}
// Recovery: reads a DONE entry and drops the completed transaction from
// the in-memory maps. Returns false on truncation or an unknown tx.
private boolean readDoneEntry() throws IOException
{
    // get the tx identifier
    buffer.clear();
    buffer.limit( 4 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    int identifier = buffer.getInt();
    StartEntry entry = xidIdentMap.get( identifier );
    if ( entry == null )
    {
        return false;
    }
    Xid xid = entry.getXid();
    xaRm.pruneXid( xid );
    xidIdentMap.remove( identifier );
    recoveredTxMap.remove( identifier );
    return true;
}
// [TX_2P_COMMIT][identifier]
public synchronized void commitTwoPhase( int identifier ) throws XAException
{
    assert xidIdentMap.get( identifier ) != null;
    try
    {
        writeBuffer.put( TX_2P_COMMIT ).putInt( identifier );
        // The commit record must be durable before the commit proceeds.
        writeBuffer.force();
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log unable to mark 2PC ["
            + identifier + "] " + e );
    }
}
/**
 * Recovery: reads a TX_2P_COMMIT entry and re-injects the two-phase
 * commit into the resource manager.
 *
 * @return false if the entry is truncated or references an unknown tx
 */
private boolean readTxTwoPhaseCommit() throws IOException
{
    // get the tx identifier
    buffer.clear();
    buffer.limit( 4 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    int identifier = buffer.getInt();
    StartEntry entry = xidIdentMap.get( identifier );
    if ( entry == null )
    {
        return false;
    }
    Xid xid = entry.getXid();
    if ( xid == null )
    {
        return false;
    }
    try
    {
        xaRm.injectTwoPhaseCommit( xid );
    }
    catch ( XAException e )
    {
        e.printStackTrace();
        // Preserve the original failure as the cause instead of
        // discarding everything but its message (consistent with the
        // one-phase commit path).
        IOException ioe = new IOException( e.getMessage() );
        ioe.initCause( e );
        throw ioe;
    }
    return true;
}
// [COMMAND][identifier][COMMAND_DATA]
public synchronized void writeCommand( XaCommand command, int identifier )
    throws IOException
{
    // May trigger a rotation before this command is appended.
    checkLogRotation();
    assert xidIdentMap.get( identifier ) != null;
    writeBuffer.put( COMMAND ).putInt( identifier );
    command.writeToFile( writeBuffer );
}
// Recovery: reads a COMMAND entry and injects the command into the
// recovered transaction it belongs to.
private boolean readCommandEntry() throws IOException
{
    buffer.clear();
    buffer.limit( 4 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        return false;
    }
    buffer.flip();
    int identifier = buffer.getInt();
    XaCommand command = cf.readCommand( fileChannel, buffer );
    if ( command == null )
    {
        // readCommand returns null if full command couldn't be loaded
        return false;
    }
    command.setRecovered();
    // NOTE(review): assumes the identifier was seen in a prior TX_START;
    // an orphan COMMAND entry would leave xaTx null and NPE below —
    // confirm the log format rules this out.
    XaTransaction xaTx = recoveredTxMap.get( identifier );
    xaTx.injectCommand( command );
    return true;
}
// Rotates the log once it has grown past rotateAtSize — unless a single
// huge transaction spans more than half the threshold, in which case
// rotation is postponed (it would have to copy all of that tx's entries
// into the new log).
private void checkLogRotation() throws IOException
{
    if ( !autoRotate )
    {
        return;
    }
    long currentPos = writeBuffer.getFileChannelPosition();
    if ( currentPos < rotateAtSize )
    {
        return;
    }
    long firstStartEntry = getFirstStartEntry( currentPos );
    // only rotate if no huge tx is running
    if ( ( currentPos - firstStartEntry ) < rotateAtSize / 2 )
    {
        rotate();
    }
}
// Archives the given log file by renaming it to <fileName>.v<version>
// (consuming a new version number from the tx factory) and truncating
// the archived copy at its real end position to drop trailing garbage.
private void renameCurrentLogFileAndIncrementVersion( String logFileName,
    long endPosition ) throws IOException
{
    File file = new File( logFileName );
    if ( !file.exists() )
    {
        throw new IOException( "Logical log[" + logFileName +
            "] not found" );
    }
    String newName = fileName + ".v" + xaTf.getAndSetNewVersion();
    File newFile = new File( newName );
    boolean renamed = FileUtils.renameFile( file, newFile );
    if ( !renamed )
    {
        throw new IOException( "Failed to rename log to: " + newName );
    }
    else
    {
        try
        {
            FileChannel channel = new RandomAccessFile( newName,
                "rw" ).getChannel();
            FileUtils.truncateFile( channel, endPosition );
        }
        catch ( IOException e )
        {
            // Non-fatal: the archive just keeps some trailing bytes.
            log.log( Level.WARNING,
                "Failed to truncate log at correct size", e );
        }
    }
}
// Deletes the given (clean) log file. A missing file is an error; a
// failed deletion is only logged as a warning.
private void deleteCurrentLogFile( String logFileName ) throws IOException
{
    File file = new File( logFileName );
    if ( !file.exists() )
    {
        throw new IOException( "Logical log[" + logFileName +
            "] not found" );
    }
    if ( !FileUtils.deleteFile( file ) )
    {
        log.warning( "Unable to delete clean logical log[" + logFileName +
            "]" );
    }
}
// Flushes and detaches the write buffer, then closes the underlying
// channel. Leaves both fields null.
private void releaseCurrentLogFile() throws IOException
{
    if ( writeBuffer != null )
    {
        writeBuffer.force();
        writeBuffer = null;
    }
    fileChannel.close();
    fileChannel = null;
}
/**
 * Closes the log. With no active transactions the log is marked clean
 * and the file is either deleted or archived (depending on keepLogs /
 * backupSlave). With active transactions the file is closed dirty, so
 * the next open() will run recovery.
 */
public synchronized void close() throws IOException
{
    if ( fileChannel == null || !fileChannel.isOpen() )
    {
        log.fine( "Logical log: " + fileName + " already closed" );
        return;
    }
    long endPosition = writeBuffer.getFileChannelPosition();
    if ( xidIdentMap.size() > 0 )
    {
        // Dirty close: keep the file and the non-clean .active marker so
        // recovery runs the next time the log is opened.
        log.info( "Close invoked with " + xidIdentMap.size() +
            " running transaction(s). " );
        writeBuffer.force();
        writeBuffer = null;
        fileChannel.close();
        log.info( "Dirty log: " + fileName + "." + currentLog +
            " now closed. Recovery will be started automatically next " +
            "time it is opened." );
        return;
    }
    releaseCurrentLogFile();
    char logWas = currentLog;
    if ( currentLog != CLEAN ) // again special case, see above
    {
        setActiveLog( CLEAN );
    }
    if ( !keepLogs || backupSlave )
    {
        if ( logWas == CLEAN )
        {
            // special case going from old xa version with no log rotation
            // and we started with a recovery
            deleteCurrentLogFile( fileName );
        }
        else
        {
            deleteCurrentLogFile( fileName + "." + logWas );
        }
    }
    else
    {
        renameCurrentLogFileAndIncrementVersion( fileName + "." +
            logWas, endPosition );
    }
}
// Scans an existing log file, re-creating every transaction found, and
// leaves the channel positioned so new entries overwrite any trailing
// broken record. Called from open(String) when the file is non-empty.
private void doInternalRecovery( String logFileName ) throws IOException
{
    log.info( "Non clean shutdown detected on log [" + logFileName +
        "]. Recovery started ..." );
    // read the 8-byte log version header
    buffer.clear();
    buffer.limit( 8 );
    if ( fileChannel.read( buffer ) != 8 )
    {
        // Header incomplete: move the unusable file aside and start a
        // fresh, empty log under the original name.
        log.info( "Unable to read timestamp information, "
            + "no records in logical log." );
        fileChannel.close();
        boolean success = FileUtils.renameFile( new File( logFileName ),
            new File( logFileName + "_unknown_timestamp_" +
                System.currentTimeMillis() + ".log" ) );
        assert success;
        fileChannel = new RandomAccessFile( logFileName,
            "rw" ).getChannel();
        return;
    }
    buffer.flip();
    logVersion = buffer.getLong();
    log.fine( "Logical log version: " + logVersion );
    long logEntriesFound = 0;
    long lastEntryPos = fileChannel.position();
    while ( readEntry() )
    {
        logEntriesFound++;
        lastEntryPos = fileChannel.position();
    }
    // make sure we overwrite any broken records
    fileChannel.position( lastEntryPos );
    scanIsComplete = true;
    log.fine( "Internal recovery completed, scanned " + logEntriesFound
        + " log entries." );
    // Per the class javadoc: non-prepared transactions are rolled back
    // here; what remains in xidIdentMap afterwards are prepared 2PC
    // transactions awaiting the transaction manager's verdict.
    xaRm.checkXids();
    if ( xidIdentMap.size() == 0 )
    {
        log.fine( "Recovery completed." );
    }
    else
    {
        log.fine( "[" + logFileName + "] Found " + xidIdentMap.size()
            + " prepared 2PC transactions." );
        for ( StartEntry entry : xidIdentMap.values() )
        {
            log.fine( "[" + logFileName + "] 2PC xid[" +
                entry.getXid() + "]" );
        }
    }
    recoveredTxMap.clear();
}
// for testing, do not use!
// Drops all in-memory transaction state without touching the file.
void reset()
{
    xidIdentMap.clear();
    recoveredTxMap.clear();
}
// Reads the next entry's type byte and dispatches to the matching
// reader. Returns false at end of log, on an EMPTY marker (position is
// rewound one byte so the marker gets overwritten later), or when a
// reader reports a truncated/unknown entry.
private boolean readEntry() throws IOException
{
    buffer.clear();
    buffer.limit( 1 );
    if ( fileChannel.read( buffer ) != buffer.limit() )
    {
        // ok no more entries we're done
        return false;
    }
    buffer.flip();
    byte entry = buffer.get();
    switch ( entry )
    {
        case TX_START:
            return readTxStartEntry();
        case TX_PREPARE:
            return readTxPrepareEntry();
        case TX_1P_COMMIT:
            return readTxOnePhaseCommit();
        case TX_2P_COMMIT:
            return readTxTwoPhaseCommit();
        case COMMAND:
            return readCommandEntry();
        case DONE:
            return readDoneEntry();
        case EMPTY:
            fileChannel.position( fileChannel.position() - 1 );
            return false;
        default:
            throw new IOException( "Internal recovery failed, "
                + "unknown log entry[" + entry + "]" );
    }
}
// Maps each committing thread to the identifier of the transaction it
// is currently writing; see getCurrentTxIdentifier().
private ArrayMap<Thread,Integer> txIdentMap =
    new ArrayMap<Thread,Integer>( 5, true, true );

// Associates the calling thread with the given tx identifier.
void registerTxIdentifier( int identifier )
{
    txIdentMap.put( Thread.currentThread(), identifier );
}

// Clears the calling thread's tx association.
void unregisterTxIdentifier()
{
    txIdentMap.remove( Thread.currentThread() );
}
/**
 * If the current thread is committing a transaction the identifier of that
 * {@link XaTransaction} can be obtained invoking this method.
 *
 * @return the identifier of the transaction committing or <CODE>-1</CODE>
 *         if current thread isn't committing any transaction
 */
public int getCurrentTxIdentifier()
{
    Integer identifier = txIdentMap.get( Thread.currentThread() );
    return identifier == null ? -1 : identifier;
}
/**
 * Opens a read-only channel over an archived log of the given version.
 *
 * @throws IOException if no such versioned log file exists
 */
public ReadableByteChannel getLogicalLog( long version ) throws IOException
{
    String name = fileName + ".v" + version;
    if ( !new File( name ).exists() )
    {
        throw new IOException( "No such log version:" + version );
    }
    return new RandomAccessFile( name, "r" ).getChannel();
}
// Returns the length in bytes of the archived log with the given
// version, or -1 if that log does not exist.
public long getLogicalLogLength( long version )
{
    File file = new File( fileName + ".v" + version );
    return file.exists() ? file.length() : -1;
}
// Checks whether an archived log exists for the given version.
public boolean hasLogicalLog( long version )
{
    return new File( fileName + ".v" + version ).exists();
}
// Deletes the archived log with the given version, if present.
// Returns true only if the file existed and was deleted.
public boolean deleteLogicalLog( long version )
{
    File file = new File( fileName + ".v" + version );
    if ( !file.exists() )
    {
        return false;
    }
    return FileUtils.deleteFile( file );
}
// Switches this log into backup-slave mode: local transaction starts
// are refused and changes arrive only through applyLog(). Only allowed
// while no transactions are in flight.
public void makeBackupSlave()
{
    if ( xidIdentMap.size() > 0 )
    {
        throw new IllegalStateException( "There are active transactions" );
    }
    backupSlave = true;
}
/**
 * Applies entries from a master-produced log to a backup slave. Unlike
 * internal recovery, commit entries are executed immediately against
 * the resource manager instead of being re-injected for later
 * resolution by the transaction manager.
 */
private static class LogApplier
{
    private final ReadableByteChannel byteChannel;
    private final ByteBuffer buffer;
    private final XaTransactionFactory xaTf;
    private final XaResourceManager xaRm;
    private final XaCommandFactory xaCf;
    private final ArrayMap<Integer,StartEntry> xidIdentMap;
    private final Map<Integer,XaTransaction> recoveredTxMap;

    LogApplier( ReadableByteChannel byteChannel, ByteBuffer buffer,
        XaTransactionFactory xaTf, XaResourceManager xaRm,
        XaCommandFactory xaCf, ArrayMap<Integer,StartEntry> xidIdentMap,
        Map<Integer,XaTransaction> recoveredTxMap )
    {
        this.byteChannel = byteChannel;
        this.buffer = buffer;
        this.xaTf = xaTf;
        this.xaRm = xaRm;
        this.xaCf = xaCf;
        this.xidIdentMap = xidIdentMap;
        this.recoveredTxMap = recoveredTxMap;
    }

    /**
     * Reads and applies the next log entry.
     *
     * @return true if an entry was applied, false at end of log or on an
     *         EMPTY marker
     */
    boolean readAndApplyEntry() throws IOException
    {
        buffer.clear();
        buffer.limit( 1 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            // ok no more entries we're done
            return false;
        }
        buffer.flip();
        byte entry = buffer.get();
        switch ( entry )
        {
            case TX_START:
                readTxStartEntry();
                return true;
            case TX_PREPARE:
                readTxPrepareEntry();
                return true;
            case TX_1P_COMMIT:
                readAndApplyTxOnePhaseCommit();
                return true;
            case TX_2P_COMMIT:
                readAndApplyTxTwoPhaseCommit();
                return true;
            case COMMAND:
                readCommandEntry();
                return true;
            case DONE:
                readDoneEntry();
                return true;
            case EMPTY:
                return false;
            default:
                throw new IOException( "Internal recovery failed, "
                    + "unknown log entry[" + entry + "]" );
        }
    }

    // Re-creates the transaction begun by a TX_START entry.
    private void readTxStartEntry() throws IOException
    {
        // get the global id
        buffer.clear();
        buffer.limit( 1 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            throw new IOException( "Unable to read tx start entry" );
        }
        buffer.flip();
        byte globalIdLength = buffer.get();
        // get the branchId id
        buffer.clear();
        buffer.limit( 1 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            throw new IOException( "Unable to read tx start entry" );
        }
        buffer.flip();
        byte branchIdLength = buffer.get();
        byte globalId[] = new byte[globalIdLength];
        ByteBuffer tmpBuffer = ByteBuffer.wrap( globalId );
        if ( byteChannel.read( tmpBuffer ) != globalId.length )
        {
            throw new IOException( "Unable to read tx start entry" );
        }
        byte branchId[] = new byte[branchIdLength];
        tmpBuffer = ByteBuffer.wrap( branchId );
        if ( byteChannel.read( tmpBuffer ) != branchId.length )
        {
            throw new IOException( "Unable to read tx start entry" );
        }
        // get the tx identifier
        buffer.clear();
        buffer.limit( 4 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            throw new IOException( "Unable to read tx start entry" );
        }
        buffer.flip();
        int identifier = buffer.getInt();
        // get the format id
        buffer.clear();
        buffer.limit( 4 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            throw new IOException( "Unable to read tx start entry" );
        }
        buffer.flip();
        int formatId = buffer.getInt();
        // re-create the transaction; no start position is tracked when
        // applying a shipped log, hence -1
        Xid xid = new XidImpl( globalId, branchId, formatId );
        xidIdentMap.put( identifier, new StartEntry( xid, -1 ) );
        XaTransaction xaTx = xaTf.create( identifier );
        xaTx.setRecovered();
        recoveredTxMap.put( identifier, xaTx );
        xaRm.injectStart( xid, xaTx );
    }

    // Marks the transaction of a TX_PREPARE entry as prepared.
    private void readTxPrepareEntry() throws IOException
    {
        // get the tx identifier
        buffer.clear();
        buffer.limit( 4 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            throw new IOException( "Unable to read tx prepare entry" );
        }
        buffer.flip();
        int identifier = buffer.getInt();
        StartEntry entry = xidIdentMap.get( identifier );
        if ( entry == null )
        {
            // Fixed: message previously misspelled "prepeare".
            throw new IOException( "Unable to read tx prepare entry" );
        }
        Xid xid = entry.getXid();
        if ( xaRm.injectPrepare( xid ) )
        {
            // read only, we can remove
            xidIdentMap.remove( identifier );
            recoveredTxMap.remove( identifier );
        }
    }

    // Commits the transaction of a TX_1P_COMMIT entry immediately.
    private void readAndApplyTxOnePhaseCommit() throws IOException
    {
        // get the tx identifier
        buffer.clear();
        buffer.limit( 4 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            throw new IOException( "Unable to read tx 1PC entry" );
        }
        buffer.flip();
        int identifier = buffer.getInt();
        StartEntry entry = xidIdentMap.get( identifier );
        if ( entry == null )
        {
            // Fixed: message previously said "prepeare entry", a
            // copy-paste from the prepare handler.
            throw new IOException( "Unable to read tx 1P commit entry" );
        }
        Xid xid = entry.getXid();
        try
        {
            xaRm.commit( xid, true );
        }
        catch ( XAException e )
        {
            e.printStackTrace();
            // Preserve the original failure as the cause.
            IOException ioe = new IOException( e.getMessage() );
            ioe.initCause( e );
            throw ioe;
        }
    }

    // Commits the transaction of a TX_2P_COMMIT entry immediately.
    private void readAndApplyTxTwoPhaseCommit() throws IOException
    {
        // get the tx identifier
        buffer.clear();
        buffer.limit( 4 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            throw new IOException( "Unable to read tx 2PC entry" );
        }
        buffer.flip();
        int identifier = buffer.getInt();
        StartEntry entry = xidIdentMap.get( identifier );
        if ( entry == null )
        {
            // Fixed: message previously said "prepeare entry", a
            // copy-paste from the prepare handler.
            throw new IOException( "Unable to read tx 2P commit entry" );
        }
        Xid xid = entry.getXid();
        try
        {
            xaRm.commit( xid, true );
        }
        catch ( XAException e )
        {
            e.printStackTrace();
            // Preserve the original failure as the cause.
            IOException ioe = new IOException( e.getMessage() );
            ioe.initCause( e );
            throw ioe;
        }
    }

    // Injects a COMMAND entry into the transaction it belongs to.
    private void readCommandEntry() throws IOException
    {
        buffer.clear();
        buffer.limit( 4 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            throw new IOException( "Unable to read tx command entry" );
        }
        buffer.flip();
        int identifier = buffer.getInt();
        XaCommand command = xaCf.readCommand( byteChannel, buffer );
        if ( command == null )
        {
            throw new IOException( "Unable to read command entry" );
        }
        command.setRecovered();
        XaTransaction xaTx = recoveredTxMap.get( identifier );
        xaTx.injectCommand( command );
    }

    // Drops a completed transaction on a DONE entry.
    private boolean readDoneEntry() throws IOException
    {
        // get the tx identifier
        buffer.clear();
        buffer.limit( 4 );
        if ( byteChannel.read( buffer ) != buffer.limit() )
        {
            return false;
        }
        buffer.flip();
        int identifier = buffer.getInt();
        StartEntry entry = xidIdentMap.get( identifier );
        if ( entry == null )
        {
            throw new IOException( "Unable to read tx done entry" );
        }
        Xid xid = entry.getXid();
        xaRm.pruneXidIfExist( xid );
        xidIdentMap.remove( identifier );
        recoveredTxMap.remove( identifier );
        return true;
    }
}
/**
 * Applies a complete log produced by a master to this backup slave.
 * The shipped log's version must match this resource's current version;
 * afterwards the store is flushed, the version incremented and the
 * resource manager reset.
 */
public synchronized void applyLog( ReadableByteChannel byteChannel )
    throws IOException
{
    if ( !backupSlave )
    {
        throw new IllegalStateException( "This is not a backup slave" );
    }
    if ( xidIdentMap.size() > 0 )
    {
        throw new IllegalStateException( "There are active transactions" );
    }
    buffer.clear();
    buffer.limit( 8 );
    if ( byteChannel.read( buffer ) != 8 )
    {
        throw new IOException( "Unable to read log version" );
    }
    buffer.flip();
    logVersion = buffer.getLong();
    if ( logVersion != xaTf.getCurrentVersion() )
    {
        throw new IllegalStateException( "Tried to apply version " +
            logVersion + " but expected version " +
            xaTf.getCurrentVersion() );
    }
    log.fine( "Logical log version: " + logVersion );
    long logEntriesFound = 0;
    LogApplier logApplier = new LogApplier( byteChannel, buffer, xaTf, xaRm,
        cf, xidIdentMap, recoveredTxMap );
    while ( logApplier.readAndApplyEntry() )
    {
        logEntriesFound++;
    }
    byteChannel.close();
    xaTf.flushAll();
    xaTf.getAndSetNewVersion();
    xaRm.reset();
    log.info( "Log[" + fileName + "] version " + logVersion +
        " applied successfully." );
}
/**
 * Rotates to the other physical log file (.1 &lt;-&gt; .2). Entries
 * belonging to still-active transactions are copied into the new log so
 * they remain recoverable; the old file is then archived or deleted
 * depending on keepLogs.
 */
public synchronized void rotate() throws IOException
{
    xaTf.flushAll();
    String newLogFile = fileName + ".2";
    String currentLogFile = fileName + ".1";
    char newActiveLog = LOG2;
    long currentVersion = xaTf.getCurrentVersion();
    String oldCopy = fileName + ".v" + currentVersion;
    if ( currentLog == CLEAN || currentLog == LOG2 )
    {
        newActiveLog = LOG1;
        newLogFile = fileName + ".1";
        currentLogFile = fileName + ".2";
    }
    else
    {
        assert currentLog == LOG1;
    }
    if ( new File( newLogFile ).exists() )
    {
        throw new IOException( "New log file: " + newLogFile +
            " already exist" );
    }
    if ( new File( oldCopy ).exists() )
    {
        throw new IOException( "Copy log file: " + oldCopy +
            " already exist" );
    }
    long endPosition = writeBuffer.getFileChannelPosition();
    writeBuffer.force();
    FileChannel newLog = new RandomAccessFile(
        newLogFile, "rw" ).getChannel();
    // The new log's header carries the next version number.
    buffer.clear();
    buffer.putLong( currentVersion + 1 ).flip();
    if ( newLog.write( buffer ) != 8 )
    {
        throw new IOException( "Unable to write log version to new" );
    }
    // Sanity check: the old log's header must match the current version.
    fileChannel.position( 0 );
    buffer.clear();
    buffer.limit( 8 );
    if( fileChannel.read( buffer ) != 8 )
    {
        throw new IOException( "Verification of log version failed" );
    }
    buffer.flip();
    long verification = buffer.getLong();
    if ( verification != currentVersion )
    {
        throw new IOException( "Verification of log version failed, " +
            " expected " + currentVersion + " got " + verification );
    }
    // Skip ahead to the earliest entry of a still-active transaction;
    // everything before it belongs only to completed transactions.
    if ( xidIdentMap.size() > 0 )
    {
        fileChannel.position( getFirstStartEntry( endPosition ) );
    }
    // Copy entries of still-active transactions into the new log.
    buffer.clear();
    buffer.limit( 1 );
    boolean emptyHit = false;
    while ( fileChannel.read( buffer ) == 1 && !emptyHit )
    {
        buffer.flip();
        byte entry = buffer.get();
        switch ( entry )
        {
            case TX_START:
                readAndWriteTxStartEntry( newLog );
                break;
            case TX_PREPARE:
                readAndWriteTxPrepareEntry( newLog );
                break;
            case TX_1P_COMMIT:
                readAndWriteTxOnePhaseCommit( newLog );
                break;
            case TX_2P_COMMIT:
                readAndWriteTxTwoPhaseCommit( newLog );
                break;
            case COMMAND:
                readAndWriteCommandEntry( newLog );
                break;
            case DONE:
                readAndVerifyDoneEntry();
                break;
            case EMPTY:
                emptyHit = true;
                break;
            default:
                throw new IOException( "Log rotation failed, "
                    + "unknown log entry[" + entry + "]" );
        }
        buffer.clear();
        buffer.limit( 1 );
    }
    newLog.force( false );
    releaseCurrentLogFile();
    setActiveLog( newActiveLog );
    if ( keepLogs )
    {
        // Archiving consumes the new version number itself.
        renameCurrentLogFileAndIncrementVersion( currentLogFile,
            endPosition );
    }
    else
    {
        deleteCurrentLogFile( currentLogFile );
        xaTf.getAndSetNewVersion();
    }
    if ( xaTf.getCurrentVersion() != ( currentVersion + 1 ) )
    {
        throw new IOException( "version change failed" );
    }
    fileChannel = newLog;
    if ( !useMemoryMapped )
    {
        writeBuffer = new DirectMappedLogBuffer( fileChannel );
    }
    else
    {
        writeBuffer = new MemoryMappedLogBuffer( fileChannel );
    }
}
// Returns the file position of the earliest TX_START entry among the
// still-active transactions, or endPosition when none is earlier.
private long getFirstStartEntry( long endPosition )
{
    long earliest = endPosition;
    for ( StartEntry entry : xidIdentMap.values() )
    {
        long startPos = entry.getStartPosition();
        if ( startPos < earliest )
        {
            assert startPos > 0;
            earliest = startPos;
        }
    }
    return earliest;
}
// Records which physical log is active by writing the marker character
// to the ".active" file and forcing it to disk before updating the
// in-memory state.
private void setActiveLog( char c ) throws IOException
{
    if ( c != CLEAN && c != LOG1 && c != LOG2 )
    {
        throw new IllegalArgumentException( "Log must be either clean, " +
            "1 or 2" );
    }
    if ( c == currentLog )
    {
        throw new IllegalStateException( "Log should not be equal to " +
            "current " + currentLog );
    }
    ByteBuffer bb = ByteBuffer.wrap( new byte[4] );
    bb.asCharBuffer().put( c ).flip();
    FileChannel fc = new RandomAccessFile( fileName + ".active" ,
        "rw" ).getChannel();
    int wrote = fc.write( bb );
    if ( wrote != 4 )
    {
        throw new IllegalStateException( "Expected to write 4 -> " + wrote );
    }
    fc.force( false );
    fc.close();
    currentLog = c;
}
// [COMMAND][identifier][COMMAND_DATA]
// Rotation: copies a COMMAND entry into the new log, but only when it
// belongs to a transaction that is still active.
private void readAndWriteCommandEntry( FileChannel newLog )
    throws IOException
{
    // Re-build the 5-byte header (type + identifier) in the buffer; the
    // type byte was already consumed by rotate()'s dispatch loop.
    buffer.clear();
    buffer.put( COMMAND );
    buffer.limit( 1 + 4 );
    if ( fileChannel.read( buffer ) != 4 )
    {
        throw new IllegalStateException( "Unable to read command header" );
    }
    buffer.flip();
    buffer.position( 1 );
    int identifier = buffer.getInt();
    FileChannel writeToLog = null;
    if ( xidIdentMap.get( identifier ) != null )
    {
        writeToLog = newLog;
    }
    if ( writeToLog != null )
    {
        buffer.position( 0 );
        if ( writeToLog.write( buffer ) != 5 )
        {
            throw new TransactionFailureException(
                "Unable to write command header" );
        }
    }
    // The command body must be read even when not copied, to advance the
    // old log's position past it.
    XaCommand command = cf.readCommand( fileChannel, buffer );
    if ( writeToLog != null )
    {
        command.writeToFile( new DirectLogBuffer( writeToLog, buffer ) );
    }
}
// Rotation: consumes a DONE entry without copying it. A DONE for a
// transaction still present in xidIdentMap indicates an inconsistent
// log.
private void readAndVerifyDoneEntry()
    throws IOException
{
    buffer.clear();
    buffer.limit( 4 );
    if ( fileChannel.read( buffer ) != 4 )
    {
        throw new IllegalStateException( "Unable to read done entry" );
    }
    buffer.flip();
    int identifier = buffer.getInt();
    if ( xidIdentMap.get( identifier ) != null )
    {
        throw new IllegalStateException( identifier +
            " done entry found but still active" );
    }
}
// [TX_1P_COMMIT][identifier]
// Rotation: copies a TX_1P_COMMIT entry into the new log when its
// transaction is still active; otherwise only consumes it.
private void readAndWriteTxOnePhaseCommit( FileChannel newLog )
    throws IOException
{
    buffer.clear();
    buffer.limit( 1 + 4 );
    buffer.put( TX_1P_COMMIT );
    if ( fileChannel.read( buffer ) != 4 )
    {
        throw new IllegalStateException( "Unable to read 1P commit entry" );
    }
    buffer.flip();
    buffer.position( 1 );
    int identifier = buffer.getInt();
    FileChannel writeToLog = null;
    if ( xidIdentMap.get( identifier ) != null )
    {
        writeToLog = newLog;
    }
    buffer.position( 0 );
    if ( writeToLog != null && writeToLog.write( buffer ) != 5 )
    {
        throw new TransactionFailureException(
            "Unable to write 1P commit entry" );
    }
}
private void readAndWriteTxTwoPhaseCommit( FileChannel newLog )
throws IOException
{
buffer.clear();
buffer.limit( 1 + 4 );
buffer.put( TX_2P_COMMIT );
if ( fileChannel.read( buffer ) != 4 )
{
throw new IllegalStateException( "Unable to read 2P commit entry" );
}
buffer.flip();
buffer.position( 1 );
int identifier = buffer.getInt();
FileChannel writeToLog = null;
if ( xidIdentMap.get( identifier ) != null )
{
// " 2PC found but still active" );
writeToLog = newLog;
}
buffer.position( 0 );
if ( writeToLog != null && writeToLog.write( buffer ) != 5 )
{
throw new TransactionFailureException(
"Unable to write 2P commit entry" );
}
}
private void readAndWriteTxPrepareEntry( FileChannel newLog )
throws IOException
{
// get the tx identifier
buffer.clear();
buffer.limit( 1 + 4 );
buffer.put( TX_PREPARE );
if ( fileChannel.read( buffer ) != 4 )
{
throw new IllegalStateException( "Unable to read prepare entry" );
}
buffer.flip();
buffer.position( 1 );
int identifier = buffer.getInt();
FileChannel writeToLog = null;
if ( xidIdentMap.get( identifier ) != null )
{
writeToLog = newLog;
}
buffer.position( 0 );
if ( writeToLog != null && writeToLog.write( buffer ) != 5 )
{
throw new TransactionFailureException(
"Unable to write prepare entry" );
}
}
// [TX_START][xid[gid.length,bid.lengh,gid,bid]][identifier][format id]
private void readAndWriteTxStartEntry( FileChannel newLog )
throws IOException
{
// get the global id
buffer.clear();
buffer.put( TX_START );
buffer.limit( 3 );
if ( fileChannel.read( buffer ) != 2 )
{
throw new IllegalStateException(
"Unable to read tx start entry xid id lengths" );
}
buffer.flip();
buffer.position( 1 );
byte globalIdLength = buffer.get();
byte branchIdLength = buffer.get();
int xidLength = globalIdLength + branchIdLength;
buffer.limit( 3 + xidLength + 8 );
buffer.position( 3 );
if ( fileChannel.read( buffer ) != 8 + xidLength )
{
throw new IllegalStateException( "Unable to read xid" );
}
buffer.flip();
buffer.position( 3 + xidLength );
int identifier = buffer.getInt();
FileChannel writeToLog = null;
StartEntry entry = xidIdentMap.get( identifier );
if ( entry != null )
{
writeToLog = newLog;
entry.setStartPosition( newLog.position() );
}
buffer.position( 0 );
if ( writeToLog != null &&
writeToLog.write( buffer ) != 3 + 8 + xidLength )
{
throw new TransactionFailureException(
"Unable to write tx start xid" );
}
}
/**
 * Controls whether closed/rotated log files are archived as versioned files
 * instead of being deleted.
 */
public void setKeepLogs( boolean keep )
{
    keepLogs = keep;
}

/**
 * @return <CODE>true</CODE> if closed/rotated log files are kept on disk.
 */
public boolean isLogsKept()
{
    return keepLogs;
}

/**
 * Enables or disables automatic rotation of the logical log once it reaches
 * the configured target size.
 */
public void setAutoRotateLogs( boolean autoRotate )
{
    this.autoRotate = autoRotate;
}

/**
 * @return <CODE>true</CODE> if automatic log rotation is enabled.
 */
public boolean isLogsAutoRotated()
{
    return autoRotate;
}

/**
 * Sets the size in bytes at which the logical log becomes eligible for
 * rotation.
 */
public void setLogicalLogTargetSize( long size )
{
    rotateAtSize = size;
}

/**
 * @return the size in bytes at which the logical log is rotated.
 */
public long getLogicalLogTargetSize()
{
    return rotateAtSize;
}
// Bookkeeping for a live transaction seen in the log: its Xid plus the file
// position of its TX_START record. The position is updated when entries are
// copied into a new log during rotation.
private static class StartEntry
{
    private final Xid xid;
    // Absolute byte offset of the TX_START record in the current log file.
    private long startEntryPosition;

    StartEntry( Xid xid, long startPosition )
    {
        this.xid = xid;
        this.startEntryPosition = startPosition;
    }

    Xid getXid()
    {
        return xid;
    }

    long getStartPosition()
    {
        return startEntryPosition;
    }

    void setStartPosition( long newPosition )
    {
        startEntryPosition = newPosition;
    }
}
/**
 * Builds the name of the archived (versioned) log file for the given
 * version, i.e. <CODE>&lt;base&gt;.v&lt;version&gt;</CODE>.
 */
public String getFileName( long version )
{
    return new StringBuilder( fileName )
        .append( ".v" ).append( version ).toString();
}
}
|
package org.neo4j.kernel.impl.transaction.xaframework;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.transaction.xa.XAException;
import javax.transaction.xa.Xid;
import org.neo4j.kernel.Config;
import org.neo4j.kernel.impl.util.ArrayMap;
import org.neo4j.kernel.impl.util.FileUtils;
/**
 * <CODE>XaLogicalLog</CODE> is a transaction and logical log combined. In
 * this log information about the transaction (such as started, prepared and
 * committed) will be written. All commands participating in the transaction
 * will also be written to the log.
 * <p>
 * Normally you don't have to do anything with this log except open it after it
 * has been instantiated (see {@link XaContainer}). The only method that may be
 * of use when implementing a XA compatible resource is the
 * {@link #getCurrentTxIdentifier}. Leave everything else be unless you know
 * what you're doing.
 * <p>
 * When the log is opened it will be scanned for uncompleted transactions and
 * those transactions will be re-created. When the scan of the log is complete
 * all transactions that haven't entered the prepared state will be marked as
 * done (implies rolled back) and dropped. All transactions that have been
 * prepared will be held in memory until the transaction manager tells them to
 * commit. Transactions that already started committing but didn't get flagged
 * as done will be re-committed.
 */
public class XaLogicalLog
{
private Logger log;

// Marker characters written to the "<fileName>.active" file that record
// which physical log file ("<fileName>.1"/".2") is currently in use.
private static final char CLEAN = 'C';
private static final char LOG1 = '1';
private static final char LOG2 = '2';

// Channel of the currently active log file.
private FileChannel fileChannel = null;
// Scratch buffer for reading/writing single log entries; sized in the
// constructor to fit the largest possible entry header.
private final ByteBuffer buffer;
// Buffered writer over fileChannel; null until open() completes.
private LogBuffer writeBuffer = null;
// Last committed tx id recorded in the current log's header.
private long previousLogLastCommittedTx = -1;
private long logVersion = 0;
// Internal identifier -> start entry for every transaction that is active
// (started but not yet done) in this log.
private ArrayMap<Integer,LogEntry.Start> xidIdentMap =
    new ArrayMap<Integer,LogEntry.Start>( 4, false, true );
// Internal identifier -> transaction object re-created during recovery.
private Map<Integer,XaTransaction> recoveredTxMap =
    new HashMap<Integer,XaTransaction>();
private int nextIdentifier = 1;
private boolean scanIsComplete = false;
// Base file name; actual files are "<fileName>.1", ".2", ".active", ".v<n>".
private String fileName = null;
private final XaResourceManager xaRm;
private final XaCommandFactory cf;
private final XaTransactionFactory xaTf;
private char currentLog = CLEAN;
// When true closed/rotated logs are archived instead of deleted.
private boolean keepLogs = false;
private boolean autoRotate = true;
private long rotateAtSize = 10*1024*1024; // 10MB
private boolean backupSlave = false;
// NOTE(review): this flag is never read within this chunk — confirm usage.
private boolean slave = false;
private boolean useMemoryMapped = true;
// Creates a logical log bound to the given base file name. Log data lives in
// "<fileName>.1"/".2"; "<fileName>.active" records which one is current.
XaLogicalLog( String fileName, XaResourceManager xaRm, XaCommandFactory cf,
    XaTransactionFactory xaTf, Map<Object,Object> config )
{
    this.fileName = fileName;
    this.xaRm = xaRm;
    this.cf = cf;
    this.xaTf = xaTf;
    this.useMemoryMapped = getMemoryMapped( config );
    log = Logger.getLogger( this.getClass().getName() + "/" + fileName );
    // Sized to hold the largest entry header including a maximum-size Xid.
    buffer = ByteBuffer.allocateDirect( 9 + Xid.MAXGTRIDSIZE
        + Xid.MAXBQUALSIZE * 10 );
}
/**
 * Reads the memory-mapped-buffers setting from the configuration map.
 * Defaults to <CODE>true</CODE> when no configuration or no explicit value
 * is present.
 */
private boolean getMemoryMapped( Map<Object,Object> config )
{
    if ( config == null )
    {
        return true;
    }
    String value = (String) config.get( Config.USE_MEMORY_MAPPED_BUFFERS );
    return value == null || Boolean.parseBoolean( value );
}
// Opens the logical log, figuring out from the "<fileName>.active" marker
// file which physical log is current, running recovery on it if it is
// non-empty, and cleaning up any leftover files from a previous crash.
// Fix: the "Unable to delete" warnings in the LOG1/LOG2 branches logged
// copy.getName() although the file that failed to delete was otherLog.
synchronized void open() throws IOException
{
    String activeFileName = fileName + ".active";
    if ( !new File( activeFileName ).exists() )
    {
        if ( new File( fileName ).exists() )
        {
            // old < b8 xaframework with no log rotation and we need to
            // do recovery on it
            open( fileName );
        }
        else
        {
            // Fresh store: start with log 1.
            open( fileName + ".1" );
            setActiveLog( LOG1 );
        }
    }
    else
    {
        // Read the single marker character from the active file.
        FileChannel fc = new RandomAccessFile( activeFileName ,
            "rw" ).getChannel();
        byte bytes[] = new byte[256];
        ByteBuffer buf = ByteBuffer.wrap( bytes );
        int read = fc.read( buf );
        fc.close();
        if ( read != 4 )
        {
            throw new IllegalStateException( "Read " + read +
                " bytes from " + activeFileName + " but expected 4" );
        }
        buf.flip();
        char c = buf.asCharBuffer().get();
        // A leftover ".copy" file is an artifact of an interrupted rotation.
        File copy = new File( fileName + ".copy" );
        if ( copy.exists() )
        {
            if ( !copy.delete() )
            {
                log.warning( "Unable to delete " + copy.getName() );
            }
        }
        if ( c == CLEAN )
        {
            // clean shutdown; any remaining log files are stale
            String newLog = fileName + ".1";
            File file = new File( newLog );
            if ( file.exists() )
            {
                fixCleanKill( newLog );
            }
            file = new File( fileName + ".2" );
            if ( file.exists() )
            {
                fixCleanKill( fileName + ".2" );
            }
            open( newLog );
            setActiveLog( LOG1 );
        }
        else if ( c == LOG1 )
        {
            String newLog = fileName + ".1";
            if ( !new File( newLog ).exists() )
            {
                throw new IllegalStateException(
                    "Active marked as 1 but no " + newLog + " exist" );
            }
            currentLog = LOG1;
            File otherLog = new File( fileName + ".2" );
            if ( otherLog.exists() )
            {
                if ( !otherLog.delete() )
                {
                    // was: copy.getName() — wrong file in the message
                    log.warning( "Unable to delete " + otherLog.getName() );
                }
            }
            open( newLog );
        }
        else if ( c == LOG2 )
        {
            String newLog = fileName + ".2";
            if ( !new File( newLog ).exists() )
            {
                throw new IllegalStateException(
                    "Active marked as 2 but no " + newLog + " exist" );
            }
            File otherLog = new File( fileName + ".1" );
            if ( otherLog.exists() )
            {
                if ( !otherLog.delete() )
                {
                    // was: copy.getName() — wrong file in the message
                    log.warning( "Unable to delete " + otherLog.getName() );
                }
            }
            currentLog = LOG2;
            open( newLog );
        }
        else
        {
            throw new IllegalStateException( "Unknown active log: " + c );
        }
    }
    // Wrap the channel in the configured buffer implementation.
    if ( !useMemoryMapped )
    {
        writeBuffer = new DirectMappedLogBuffer( fileChannel );
    }
    else
    {
        writeBuffer = new MemoryMappedLogBuffer( fileChannel );
    }
}
// Called when the active marker says CLEAN but a log file still exists (the
// process died between marking clean and removing the old log): the leftover
// log is deleted, or archived as a versioned file when logs are kept.
private void fixCleanKill( String fileName ) throws IOException
{
    File file = new File( fileName );
    if ( !keepLogs )
    {
        if ( !file.delete() )
        {
            throw new IllegalStateException(
                "Active marked as clean and unable to delete log " +
                fileName );
        }
    }
    else
    {
        renameCurrentLogFileAndIncrementVersion( fileName, file.length() );
    }
}
// Opens (or creates) the given log file. A non-empty file means we stopped
// with this log active, so internal recovery runs on it; an empty file gets
// a fresh 16-byte header: [logVersion(long)][lastCommittedTx(long)].
private void open( String fileToOpen ) throws IOException
{
    fileChannel = new RandomAccessFile( fileToOpen, "rw" ).getChannel();
    if ( fileChannel.size() != 0 )
    {
        doInternalRecovery( fileToOpen );
    }
    else
    {
        logVersion = xaTf.getCurrentVersion();
        buffer.clear();
        buffer.putLong( logVersion );
        long lastTxId = xaTf.getLastCommittedTx();
        buffer.putLong( lastTxId );
        previousLogLastCommittedTx = lastTxId;
        buffer.flip();
        fileChannel.write( buffer );
        // Nothing to scan in a brand-new log.
        scanIsComplete = true;
    }
}
/**
 * @return <CODE>true</CODE> once the recovery scan of the log has finished.
 */
public boolean scanIsComplete()
{
    return scanIsComplete;
}

/**
 * Hands out the next internal transaction identifier, wrapping back to 1 on
 * integer overflow so identifiers stay positive.
 */
private int getNextIdentifier()
{
    if ( ++nextIdentifier < 0 )
    {
        nextIdentifier = 1;
    }
    return nextIdentifier;
}
// returns identifier for transaction
// [TX_START][xid[gid.length,bid.lengh,gid,bid]][identifier][format id]
// Appends a start entry for the given Xid and registers the transaction as
// active. Throws XAException when this log is a backup slave or the write
// fails.
public synchronized int start( Xid xid ) throws XAException
{
    if ( backupSlave )
    {
        throw new XAException( "Resource is configured as backup slave, " +
            "no new transactions can be started for " + fileName + "." +
            currentLog );
    }
    int xidIdent = getNextIdentifier();
    try
    {
        byte globalId[] = xid.getGlobalTransactionId();
        byte branchId[] = xid.getBranchQualifier();
        int formatId = xid.getFormatId();
        // Remember where the start record lands so the transaction can be
        // located in the file later.
        long position = writeBuffer.getFileChannelPosition();
        writeBuffer.put( LogEntry.TX_START ).put( (byte) globalId.length ).put(
            (byte) branchId.length ).put( globalId ).put( branchId )
            .putInt( xidIdent ).putInt( formatId );
        xidIdentMap.put( xidIdent,
            new LogEntry.Start( xid, xidIdent, position ) );
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log couldn't start transaction: "
            + e );
    }
    return xidIdent;
}
// [TX_PREPARE][identifier]
// Appends a prepare marker for the transaction and forces it to disk so the
// prepare survives a crash.
public synchronized void prepare( int identifier ) throws XAException
{
    assert xidIdentMap.get( identifier ) != null;
    try
    {
        writeBuffer.put( LogEntry.TX_PREPARE ).putInt( identifier );
        writeBuffer.force();
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log unable to mark prepare ["
            + identifier + "] " + e );
    }
}

// [TX_1P_COMMIT][identifier]
// Appends a one-phase commit record carrying the global txId; forced to
// disk before returning.
public synchronized void commitOnePhase( int identifier, long txId )
    throws XAException
{
    assert xidIdentMap.get( identifier ) != null;
    assert txId != -1;
    try
    {
        writeBuffer.put( LogEntry.TX_1P_COMMIT ).putInt(
            identifier ).putLong( txId );
        writeBuffer.force();
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log unable to mark 1P-commit ["
            + identifier + "] " + e );
    }
}

// [DONE][identifier]
// Marks the transaction as finished and forgets it. Unlike prepare/commit
// the record is not forced to disk here. No-op in backup-slave mode.
public synchronized void done( int identifier ) throws XAException
{
    if ( backupSlave )
    {
        return;
    }
    assert xidIdentMap.get( identifier ) != null;
    try
    {
        writeBuffer.put( LogEntry.DONE ).putInt( identifier );
        xidIdentMap.remove( identifier );
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log unable to mark as done ["
            + identifier + "] " + e );
    }
}

// [DONE][identifier] called from XaResourceManager during internal recovery
// Writes through the raw channel because the write buffer is not yet set up
// while recovery is running.
synchronized void doneInternal( int identifier ) throws IOException
{
    buffer.clear();
    buffer.put( LogEntry.DONE ).putInt( identifier );
    buffer.flip();
    fileChannel.write( buffer );
    xidIdentMap.remove( identifier );
}

// [TX_2P_COMMIT][identifier]
// Appends a two-phase commit record carrying the global txId; forced to
// disk before returning. The transaction stays active until done().
public synchronized void commitTwoPhase( int identifier, long txId )
    throws XAException
{
    assert xidIdentMap.get( identifier ) != null;
    assert txId != -1;
    try
    {
        writeBuffer.put( LogEntry.TX_2P_COMMIT ).putInt(
            identifier ).putLong( txId );
        writeBuffer.force();
    }
    catch ( IOException e )
    {
        throw new XAException( "Logical log unable to mark 2PC ["
            + identifier + "] " + e );
    }
}

// [COMMAND][identifier][COMMAND_DATA]
// Appends a command belonging to the given transaction; may first rotate
// the log when it has grown past the target size.
public synchronized void writeCommand( XaCommand command, int identifier )
    throws IOException
{
    checkLogRotation();
    assert xidIdentMap.get( identifier ) != null;
    writeBuffer.put( LogEntry.COMMAND ).putInt( identifier );
    command.writeToFile( writeBuffer ); // fileChannel, buffer );
}
// Dispatches a parsed log entry to the handler for its concrete type.
// Unknown entry types are silently ignored.
private void applyEntry( LogEntry entry ) throws IOException
{
    if ( entry instanceof LogEntry.Start )
    {
        applyStartEntry( (LogEntry.Start) entry );
    }
    else if ( entry instanceof LogEntry.Prepare )
    {
        applyPrepareEntry( (LogEntry.Prepare ) entry );
    }
    else if ( entry instanceof LogEntry.Command )
    {
        applyCommandEntry( (LogEntry.Command ) entry );
    }
    else if ( entry instanceof LogEntry.OnePhaseCommit )
    {
        applyOnePhaseCommitEntry( (LogEntry.OnePhaseCommit ) entry );
    }
    else if ( entry instanceof LogEntry.TwoPhaseCommit )
    {
        applyTwoPhaseCommitEntry( (LogEntry.TwoPhaseCommit ) entry );
    }
    else if ( entry instanceof LogEntry.Done )
    {
        applyDoneEntry( (LogEntry.Done ) entry );
    }
}

// Recovery: re-creates the transaction for a start entry and injects it
// into the resource manager as a recovered transaction.
private void applyStartEntry( LogEntry.Start entry) throws IOException
{
    int identifier = entry.getIdentifier();
    // Keep the identifier sequence ahead of anything seen in the log so new
    // transactions cannot collide with recovered ones.
    if ( identifier >= nextIdentifier )
    {
        nextIdentifier = (identifier + 1);
    }
    // re-create the transaction
    Xid xid = entry.getXid();
    xidIdentMap.put( identifier, entry );
    XaTransaction xaTx = xaTf.create( identifier );
    xaTx.setRecovered();
    recoveredTxMap.put( identifier, xaTx );
    xaRm.injectStart( xid, xaTx );
}

// Recovery: marks a recovered transaction as prepared. Read-only
// transactions are complete at this point and are dropped.
private void applyPrepareEntry( LogEntry.Prepare prepareEntry ) throws IOException
{
    // get the tx identifier
    int identifier = prepareEntry.getIdentifier();
    LogEntry.Start entry = xidIdentMap.get( identifier );
    if ( entry == null )
    {
        throw new IOException( "Unknown xid for identifier " + identifier );
    }
    Xid xid = entry.getXid();
    if ( xaRm.injectPrepare( xid ) )
    {
        // read only we can remove
        xidIdentMap.remove( identifier );
        recoveredTxMap.remove( identifier );
    }
}
// Recovery: re-applies a one-phase commit entry by committing the recovered
// transaction through the resource manager.
// Fix: the XAException is now chained as the cause of the IOException
// instead of being discarded (previously only printStackTrace + message).
private void applyOnePhaseCommitEntry( LogEntry.OnePhaseCommit commit )
    throws IOException
{
    int identifier = commit.getIdentifier();
    long txId = commit.getTxId();
    LogEntry.Start entry = xidIdentMap.get( identifier );
    if ( entry == null )
    {
        throw new IOException( "Unknown xid for identifier " + identifier );
    }
    Xid xid = entry.getXid();
    try
    {
        XaTransaction xaTx = xaRm.getXaTransaction( xid );
        xaTx.setCommitTxId( txId );
        xaRm.injectOnePhaseCommit( xid );
    }
    catch ( XAException e )
    {
        // Preserve the full failure for callers and logs.
        IOException ioe = new IOException( e.getMessage() );
        ioe.initCause( e );
        throw ioe;
    }
}
// Recovery: a done entry means the transaction completed before the crash;
// drop all bookkeeping for it.
private void applyDoneEntry( LogEntry.Done done ) throws IOException
{
    // get the tx identifier
    int identifier = done.getIdentifier();
    LogEntry.Start entry = xidIdentMap.get( identifier );
    if ( entry == null )
    {
        throw new IOException( "Unknown xid for identifier " + identifier );
    }
    Xid xid = entry.getXid();
    xaRm.pruneXid( xid );
    xidIdentMap.remove( identifier );
    recoveredTxMap.remove( identifier );
}
// Recovery: re-applies a two-phase commit entry by committing the recovered
// transaction through the resource manager.
// Fix: the XAException is now chained as the cause of the IOException
// instead of being discarded (previously only printStackTrace + message).
private void applyTwoPhaseCommitEntry( LogEntry.TwoPhaseCommit commit ) throws IOException
{
    int identifier = commit.getIdentifier();
    long txId = commit.getTxId();
    LogEntry.Start entry = xidIdentMap.get( identifier );
    if ( entry == null )
    {
        throw new IOException( "Unknown xid for identifier " + identifier );
    }
    Xid xid = entry.getXid();
    if ( xid == null )
    {
        throw new IOException( "Xid null for identifier " + identifier );
    }
    try
    {
        XaTransaction xaTx = xaRm.getXaTransaction( xid );
        xaTx.setCommitTxId( txId );
        xaRm.injectTwoPhaseCommit( xid );
    }
    catch ( XAException e )
    {
        // Preserve the full failure for callers and logs.
        IOException ioe = new IOException( e.getMessage() );
        ioe.initCause( e );
        throw ioe;
    }
}
// Recovery: injects a recovered command into its (already re-created)
// transaction.
private void applyCommandEntry( LogEntry.Command entry ) throws IOException
{
    int identifier = entry.getIdentifier();
    XaCommand command = entry.getXaCommand();
    if ( command == null )
    {
        throw new IOException( "Null command for identifier " + identifier );
    }
    command.setRecovered();
    XaTransaction xaTx = recoveredTxMap.get( identifier );
    xaTx.injectCommand( command );
}
// Rotates the log when it has grown past the target size, unless a single
// huge transaction occupies more than half the target size (rotating then
// would have to copy most of the log anyway).
private void checkLogRotation() throws IOException
{
    if ( autoRotate &&
        writeBuffer.getFileChannelPosition() >= rotateAtSize )
    {
        long currentPos = writeBuffer.getFileChannelPosition();
        long firstStartEntry = getFirstStartEntry( currentPos );
        // only rotate if no huge tx is running
        if ( ( currentPos - firstStartEntry ) < rotateAtSize / 2 )
        {
            rotate();
        }
    }
}
// Archives the given log file as "<base>.v<newVersion>" (bumping the store's
// version counter) and truncates it to the given end position, discarding
// any zero padding after the last real entry.
// Fix: the FileChannel opened for truncation was never closed (leak).
private void renameCurrentLogFileAndIncrementVersion( String logFileName,
    long endPosition ) throws IOException
{
    // DumpLogicalLog.main( new String[] { logFileName } );
    File file = new File( logFileName );
    if ( !file.exists() )
    {
        throw new IOException( "Logical log[" + logFileName +
            "] not found" );
    }
    String newName = fileName + ".v" + xaTf.getAndSetNewVersion();
    File newFile = new File( newName );
    boolean renamed = FileUtils.renameFile( file, newFile );
    if ( !renamed )
    {
        throw new IOException( "Failed to rename log to: " + newName );
    }
    else
    {
        try
        {
            FileChannel channel = new RandomAccessFile( newName,
                "rw" ).getChannel();
            try
            {
                FileUtils.truncateFile( channel, endPosition );
            }
            finally
            {
                channel.close();
            }
        }
        catch ( IOException e )
        {
            // Truncation failure is survivable: the archive just keeps
            // trailing padding.
            log.log( Level.WARNING,
                "Failed to truncate log at correct size", e );
        }
    }
    // DumpLogicalLog.main( new String[] { newName } );
}
/**
 * Deletes a no-longer-needed log file. Missing file is an error; a failed
 * delete is only logged as a warning.
 */
private void deleteCurrentLogFile( String logFileName ) throws IOException
{
    File logFile = new File( logFileName );
    if ( !logFile.exists() )
    {
        throw new IOException( "Logical log[" + logFileName +
            "] not found" );
    }
    if ( !FileUtils.deleteFile( logFile ) )
    {
        log.warning( "Unable to delete clean logical log[" + logFileName +
            "]" );
    }
}
// Flushes and drops the write buffer, then closes and drops the underlying
// channel.
private void releaseCurrentLogFile() throws IOException
{
    if ( writeBuffer != null )
    {
        writeBuffer.force();
        writeBuffer = null;
    }
    fileChannel.close();
    fileChannel = null;
}
// Shuts the log down. With no active transactions the log is marked clean
// and then deleted or archived; with active transactions the file is left
// dirty on purpose so recovery runs on the next open.
public synchronized void close() throws IOException
{
    if ( fileChannel == null || !fileChannel.isOpen() )
    {
        log.fine( "Logical log: " + fileName + " already closed" );
        return;
    }
    long endPosition = writeBuffer.getFileChannelPosition();
    if ( xidIdentMap.size() > 0 )
    {
        // Active transactions: flush what we have and keep the file dirty.
        log.info( "Close invoked with " + xidIdentMap.size() +
            " running transaction(s). " );
        writeBuffer.force();
        writeBuffer = null;
        fileChannel.close();
        log.info( "Dirty log: " + fileName + "." + currentLog +
            " now closed. Recovery will be started automatically next " +
            "time it is opened." );
        return;
    }
    releaseCurrentLogFile();
    char logWas = currentLog;
    if ( currentLog != CLEAN ) // again special case, see above
    {
        setActiveLog( CLEAN );
    }
    if ( !keepLogs || backupSlave )
    {
        if ( logWas == CLEAN )
        {
            // special case going from old xa version with no log rotation
            // and we started with a recovery
            deleteCurrentLogFile( fileName );
        }
        else
        {
            deleteCurrentLogFile( fileName + "." + logWas );
        }
    }
    else
    {
        // Archive the closed log as a versioned file.
        renameCurrentLogFileAndIncrementVersion( fileName + "." +
            logWas, endPosition );
    }
}
// Scans a non-empty log after an unclean shutdown: applies every readable
// entry, re-creating in-flight transactions, then zeroes out everything
// after the last complete entry so new writes start on a clean tail.
private void doInternalRecovery( String logFileName ) throws IOException
{
    log.info( "Non clean shutdown detected on log [" + logFileName +
        "]. Recovery started ..." );
    // get log creation time
    buffer.clear();
    buffer.limit( 16 );
    if ( fileChannel.read( buffer ) != 16 )
    {
        // Header incomplete: stash the unusable file aside and start fresh.
        log.info( "Unable to read timestamp information, "
            + "no records in logical log." );
        fileChannel.close();
        boolean success = FileUtils.renameFile( new File( logFileName ),
            new File( logFileName + "_unknown_timestamp_" +
                System.currentTimeMillis() + ".log" ) );
        assert success;
        fileChannel = new RandomAccessFile( logFileName,
            "rw" ).getChannel();
        return;
    }
    buffer.flip();
    logVersion = buffer.getLong();
    long lastCommittedTx = buffer.getLong();
    previousLogLastCommittedTx = lastCommittedTx;
    log.fine( "Logical log version: " + logVersion + " with committed tx[" +
        lastCommittedTx + "]" );
    long logEntriesFound = 0;
    long lastEntryPos = fileChannel.position();
    LogEntry entry;
    // Apply entries until the first unreadable/partial one.
    while ( (entry = readEntry()) != null )
    {
        applyEntry( entry );
        logEntriesFound++;
        lastEntryPos = fileChannel.position();
    }
    // make sure we overwrite any broken records
    fileChannel.position( lastEntryPos );
    // zero out the slow way since windows don't support truncate very well
    buffer.clear();
    while ( buffer.hasRemaining() )
    {
        buffer.put( (byte)0 );
    }
    buffer.flip();
    long endPosition = fileChannel.size();
    do
    {
        long bytesLeft = fileChannel.size() - fileChannel.position();
        if ( bytesLeft < buffer.capacity() )
        {
            buffer.limit( (int) bytesLeft );
        }
        fileChannel.write( buffer );
        buffer.flip();
    } while ( fileChannel.position() < endPosition );
    // New entries will be appended right after the last valid one.
    fileChannel.position( lastEntryPos );
    scanIsComplete = true;
    log.fine( "Internal recovery completed, scanned " + logEntriesFound
        + " log entries." );
    // Lets the resource manager roll back everything not prepared.
    xaRm.checkXids();
    if ( xidIdentMap.size() == 0 )
    {
        log.fine( "Recovery completed." );
    }
    else
    {
        // Remaining entries are prepared 2PC transactions awaiting the TM.
        log.fine( "[" + logFileName + "] Found " + xidIdentMap.size()
            + " prepared 2PC transactions." );
        for ( LogEntry.Start startEntry : xidIdentMap.values() )
        {
            log.fine( "[" + logFileName + "] 2PC xid[" +
                startEntry.getXid() + "]" );
        }
    }
    recoveredTxMap.clear();
}
// for testing, do not use!
// Drops all transaction bookkeeping without touching the files.
void reset()
{
    xidIdentMap.clear();
    recoveredTxMap.clear();
}

// Reads the next entry from the current log, tagging start entries with the
// file position they were read from. Returns null at end of readable data.
private LogEntry readEntry() throws IOException
{
    long position = fileChannel.position();
    LogEntry entry = LogIoUtils.readEntry( buffer, fileChannel, cf );
    if ( entry instanceof LogEntry.Start )
    {
        ((LogEntry.Start) entry).setStartPosition( position );
    }
    return entry;
}

// Thread -> identifier of the transaction that thread is committing; backs
// getCurrentTxIdentifier().
private ArrayMap<Thread,Integer> txIdentMap =
    new ArrayMap<Thread,Integer>( 5, true, true );

// Associates the calling thread with the transaction it is committing.
void registerTxIdentifier( int identifier )
{
    txIdentMap.put( Thread.currentThread(), identifier );
}

// Clears the calling thread's transaction association.
void unregisterTxIdentifier()
{
    txIdentMap.remove( Thread.currentThread() );
}
/**
 * If the current thread is committing a transaction the identifier of that
 * {@link XaTransaction} can be obtained invoking this method.
 *
 * @return the identifier of the transaction committing or <CODE>-1</CODE>
 *         if current thread isn't committing any transaction
 */
public int getCurrentTxIdentifier()
{
    Integer identifier = txIdentMap.get( Thread.currentThread() );
    return identifier == null ? -1 : identifier;
}
/**
 * Opens the archived log for the given version as a read-only channel.
 *
 * @throws IOException if no archived log exists for that version
 */
public ReadableByteChannel getLogicalLog( long version ) throws IOException
{
    File logFile = new File( fileName + ".v" + version );
    if ( !logFile.exists() )
    {
        throw new IOException( "No such log version:" + version );
    }
    return new RandomAccessFile( logFile, "r" ).getChannel();
}
// Collects the start and command entries of a single prepared transaction
// (matched by internal identifier) from the given log channel.
private List<LogEntry> extractPreparedTransactionFromLog( long identifier,
    ReadableByteChannel log ) throws IOException
{
    // Skip the 16-byte [version][lastCommittedTx] header.
    // NOTE(review): the read count is not checked here, unlike in
    // extractTransactionFromLog — confirm the channel always has a header.
    buffer.clear();
    buffer.limit( 16 );
    log.read( buffer );
    List<LogEntry> logEntryList = new ArrayList<LogEntry>();
    LogEntry entry;
    while ( (entry = LogIoUtils.readEntry( buffer, log, cf )) != null )
    {
        if ( entry.getIdentifier() != identifier )
        {
            continue;
        }
        // A prepared transaction may only consist of start + commands; any
        // other entry type for this identifier means it is not prepared.
        if ( entry instanceof LogEntry.Start || entry instanceof LogEntry.Command )
        {
            logEntryList.add( entry );
        }
        else
        {
            throw new RuntimeException( "Expected start or command entry but found: " + entry );
        }
    }
    if ( logEntryList.isEmpty() )
    {
        throw new IOException( "Transaction for internal identifier[" + identifier +
            "] not found in current log" );
    }
    return logEntryList;
}
// Extracts the complete entry list of the committed transaction with the
// given global txId from a log channel, verifying the log's header first.
// Scans forward, buffering entries per internal identifier until the commit
// record with the matching txId is seen.
private List<LogEntry> extractTransactionFromLog( long txId,
    long expectedVersion, ReadableByteChannel log ) throws IOException
{
    // Read and validate the 16-byte [version][lastCommittedTx] header.
    buffer.clear();
    buffer.limit( 16 );
    log.read( buffer );
    buffer.flip();
    long versionInLog = buffer.getLong();
    assertExpectedVersion( expectedVersion, versionInLog );
    long prevTxId = buffer.getLong();
    assertLogCanContainTx( txId, prevTxId );
    List<LogEntry> logEntryList = null;
    // In-flight transactions keyed by internal identifier.
    Map<Integer,List<LogEntry>> transactions =
        new HashMap<Integer,List<LogEntry>>();
    LogEntry entry;
    while ( (entry = LogIoUtils.readEntry( buffer, log, cf )) != null &&
        logEntryList == null )
    {
        if ( entry instanceof LogEntry.Start )
        {
            List<LogEntry> list = new LinkedList<LogEntry>();
            list.add( entry );
            transactions.put( entry.getIdentifier(), list );
        }
        else if ( entry instanceof LogEntry.Commit )
        {
            if ( ((LogEntry.Commit) entry).getTxId() == txId )
            {
                // Found it: its buffered entries + this commit are the result.
                logEntryList = transactions.get( entry.getIdentifier() );
                logEntryList.add( entry );
            }
            else
            {
                // Some other transaction committed; stop tracking it.
                transactions.remove( entry.getIdentifier() );
            }
        }
        else if ( entry instanceof LogEntry.Command )
        {
            transactions.get( entry.getIdentifier() ).add( entry );
        }
        else if ( entry instanceof LogEntry.Done )
        {
            transactions.remove( entry.getIdentifier() );
        }
        else
        {
            throw new RuntimeException( "Unknown entry: " + entry );
        }
    }
    if ( logEntryList == null )
    {
        throw new IOException( "Transaction[" + txId +
            "] not found in log (" + expectedVersion + ", " +
            prevTxId + ")" );
    }
    return logEntryList;
}
// Sanity check: a log can only contain txId if the previous log's last
// committed tx id is strictly smaller.
private void assertLogCanContainTx( long txId, long prevTxId ) throws IOException
{
    if ( prevTxId >= txId )
    {
        throw new IOException( "Log says " + txId +
            " can not exist in this log (prev tx id=" + prevTxId + ")" );
    }
}

// Sanity check: the version stored in the log header must match the version
// the caller expects to be reading.
private void assertExpectedVersion( long expectedVersion, long versionInLog )
    throws IOException
{
    if ( versionInLog != expectedVersion )
    {
        throw new IOException( "Expected version " + expectedVersion +
            " but got " + versionInLog );
    }
}
/**
 * Produces a temp-file name based on <CODE>baseName</CODE> plus the current
 * time, retrying (with a trailing underscore) while the candidate already
 * exists on disk.
 */
private String generateUniqueName( String baseName )
{
    String candidate = baseName + "-" + System.currentTimeMillis();
    while ( new File( candidate ).exists() )
    {
        candidate = baseName + "-" + System.currentTimeMillis() + "_";
    }
    return candidate;
}
// Returns a read-only channel over the entries of a prepared transaction,
// writing them out to a "<base>.ptx_<identifier>" file on first request and
// reusing that file afterwards.
public synchronized ReadableByteChannel getPreparedTransaction( long identifier )
    throws IOException
{
    String name = fileName + ".ptx_" + identifier;
    File txFile = new File( name );
    if ( txFile.exists() )
    {
        // Already extracted previously; serve the cached file.
        return new RandomAccessFile( name, "r" ).getChannel();
    }
    ReadableByteChannel log = getLogicalLogOrMyself( logVersion );
    List<LogEntry> logEntryList = extractPreparedTransactionFromLog( identifier, log );
    log.close();
    writeOutLogEntryList( logEntryList, name, "temporary-ptx-write-out-" + identifier );
    return new RandomAccessFile( name, "r" ).getChannel();
}
// Writes the given entries to a uniquely-named temporary file and then
// atomically renames it to the requested name, so readers never observe a
// partially written file.
// Fix: the channel is now closed in a finally block so it is not leaked
// when writing or forcing fails.
private void writeOutLogEntryList( List<LogEntry> logEntryList, String name,
    String tmpNameHint ) throws IOException
{
    String tmpName = generateUniqueName( tmpNameHint );
    FileChannel txLog = new RandomAccessFile( tmpName, "rw" ).getChannel();
    try
    {
        LogBuffer buf = new DirectMappedLogBuffer( txLog );
        for ( LogEntry entry : logEntryList )
        {
            LogIoUtils.writeLogEntry( entry, buf );
        }
        buf.force();
    }
    finally
    {
        txLog.close();
    }
    if ( !new File( tmpName ).renameTo( new File( name ) ) )
    {
        throw new IOException( "Failed to rename " + tmpName + " to " +
            name );
    }
}
// Returns a read-only channel over the entries of a committed transaction,
// locating the log version that contains it, extracting it into a
// "<base>.tx_<txId>" file on first request and reusing that file afterwards.
public synchronized ReadableByteChannel getCommittedTransaction( long txId )
    throws IOException
{
    // check if written out
    String name = fileName + ".tx_" + txId;
    File txFile = new File( name );
    if ( txFile.exists() )
    {
        return new RandomAccessFile( name, "r" ).getChannel();
    }
    long version = findLogContainingTxId( txId );
    if ( version == -1 )
    {
        throw new RuntimeException( "txId:" + txId + " not found in any logical log " +
            "(starting at " + logVersion + " and searching backwards" );
    }
    // extract transaction
    ReadableByteChannel log = getLogicalLogOrMyself( version );
    List<LogEntry> logEntryList =
        extractTransactionFromLog( txId, version, log );
    log.close();
    writeOutLogEntryList( logEntryList, name, "temporary-tx-write-out-" + txId );
    ReadableByteChannel result = new RandomAccessFile( name, "r" ).getChannel();
    return result;
}
// Opens a read-only channel for the requested log version: an archived
// ".v<version>" file for older versions, or the currently active ".1"/".2"
// file when the requested version is the live one.
private ReadableByteChannel getLogicalLogOrMyself( long version ) throws IOException
{
    if ( version < logVersion )
    {
        return getLogicalLog( version );
    }
    else if ( version == logVersion )
    {
        String currentLogName =
            fileName + (currentLog == LOG1 ? ".1" : ".2" );
        return new RandomAccessFile( currentLogName, "r" ).getChannel();
    }
    else
    {
        // Future versions cannot exist yet.
        throw new RuntimeException( "Version[" + version +
            "] is higher then current log version[" + logVersion + "]" );
    }
}
private long findLogContainingTxId( long txId ) throws IOException
{
long version = logVersion;
long committedTx = previousLogLastCommittedTx;
while ( version >= 0 )
{
ReadableByteChannel log = getLogicalLogOrMyself( version );
ByteBuffer buf = ByteBuffer.allocate( 16 );
if ( log.read( buf ) != 16 )
{
throw new IOException( "Unable to read log version " +
version );
}
buf.flip();
long readVersion = buf.getLong();
if ( readVersion != version )
{
throw new IOException( "Got " + readVersion +
" from log when expecting " + version );
}
committedTx = buf.getLong();
log.close();
if ( committedTx <= txId )
{
break;
}
version
}
return version;
}
// Length in bytes of the archived log for the given version, or -1 when it
// does not exist.
public long getLogicalLogLength( long version )
{
    String name = fileName + ".v" + version;
    File file = new File( name );
    if ( !file.exists() )
    {
        return -1;
    }
    return file.length();
}

// Whether the archived log for the given version exists on disk.
public boolean hasLogicalLog( long version )
{
    String name = fileName + ".v" + version;
    return new File( name ).exists();
}

// Deletes the archived log for the given version; returns false when the
// file does not exist or could not be deleted.
public boolean deleteLogicalLog( long version )
{
    String name = fileName + ".v" + version;
    File file = new File(name );
    if ( file.exists() )
    {
        return FileUtils.deleteFile( file );
    }
    return false;
}

// Switches this log into backup-slave mode: start() will then refuse new
// transactions. Only allowed while no transactions are in flight.
public void makeBackupSlave()
{
    if ( xidIdentMap.size() > 0 )
    {
        throw new IllegalStateException( "There are active transactions" );
    }
    backupSlave = true;
}
// Streams log entries from an external channel and applies them to this
// log/resource manager, optionally re-writing them to the local write
// buffer under a fresh internal identifier.
private class LogApplier
{
    private final ReadableByteChannel byteChannel;
    // Remembers the last start entry seen, so the caller can find the Xid
    // of the applied transaction.
    private LogEntry.Start startEntry;

    LogApplier( ReadableByteChannel byteChannel )
    {
        this.byteChannel = byteChannel;
    }

    // Applies the next entry without writing it locally; returns false at
    // end of stream.
    boolean readAndApplyEntry() throws IOException
    {
        LogEntry entry = LogIoUtils.readEntry( buffer, byteChannel, cf );
        if ( entry != null )
        {
            applyEntry( entry );
        }
        return entry != null;
    }

    // Applies the next entry AND writes it to the local log, retagged with
    // newXidIdentifier; returns false at end of stream.
    boolean readAndApplyAndWriteEntry( int newXidIdentifier ) throws IOException
    {
        LogEntry entry = LogIoUtils.readEntry( buffer, byteChannel, cf );
        if ( entry != null )
        {
            entry.setIdentifier( newXidIdentifier );
            if ( entry instanceof LogEntry.Commit )
            {
                // hack to get done record written after commit record
                LogIoUtils.writeLogEntry( entry, writeBuffer );
                applyEntry( entry );
            }
            else
            {
                if ( entry instanceof LogEntry.Start )
                {
                    startEntry = (LogEntry.Start) entry;
                }
                applyEntry( entry );
                LogIoUtils.writeLogEntry( entry, writeBuffer );
            }
            return true;
        }
        return false;
    }
}
// Backup-slave only: applies a complete logical log received from a master,
// after validating that its version matches the locally expected one, then
// flushes the store and bumps the version.
public synchronized void applyLog( ReadableByteChannel byteChannel )
    throws IOException
{
    if ( !backupSlave )
    {
        throw new IllegalStateException( "This is not a backup slave" );
    }
    if ( xidIdentMap.size() > 0 )
    {
        throw new IllegalStateException( "There are active transactions" );
    }
    // Read the 16-byte [version][lastCommittedTx] header.
    buffer.clear();
    buffer.limit( 16 );
    if ( byteChannel.read( buffer ) != 16 )
    {
        throw new IOException( "Unable to read log version" );
    }
    buffer.flip();
    logVersion = buffer.getLong();
    long previousCommittedTx = buffer.getLong();
    if ( logVersion != xaTf.getCurrentVersion() )
    {
        throw new IllegalStateException( "Tried to apply version " +
            logVersion + " but expected version " +
            xaTf.getCurrentVersion() );
    }
    log.fine( "Logical log version: " + logVersion +
        "(previous committed tx=" + previousCommittedTx + ")" );
    long logEntriesFound = 0;
    LogApplier logApplier = new LogApplier( byteChannel );
    while ( logApplier.readAndApplyEntry() )
    {
        logEntriesFound++;
    }
    byteChannel.close();
    xaTf.flushAll();
    xaTf.getAndSetNewVersion();
    xaRm.reset();
    log.info( "Log[" + fileName + "] version " + logVersion +
        " applied successfully." );
}
// Applies a streamed transaction that carries no commit record of its own,
// assigning it the given txId (which must be exactly the next id after the
// last committed transaction) and committing it one-phase locally.
// Fix: the error message previously reported xaTf.getCurrentVersion() + 1
// while the guard actually checks xaTf.getLastCommittedTx() + 1, producing
// a misleading diagnostic.
public synchronized void applyTransactionWithoutTxId( ReadableByteChannel byteChannel,
    long nextTxId ) throws IOException
{
    if ( nextTxId != (xaTf.getLastCommittedTx() + 1) )
    {
        throw new IllegalStateException( "Tried to apply tx " +
            nextTxId + " but expected transaction " +
            (xaTf.getLastCommittedTx() + 1) );
    }
    log.fine( "Logical log version: " + logVersion +
        ", committing tx=" + nextTxId + ")" );
    // System.out.println( "applyTxWithoutTxId#start @ pos: " + writeBuffer.getFileChannelPosition() );
    long logEntriesFound = 0;
    LogApplier logApplier = new LogApplier( byteChannel );
    // The incoming entries are retagged with a fresh local identifier.
    int xidIdent = getNextIdentifier();
    while ( logApplier.readAndApplyAndWriteEntry( xidIdent ) )
    {
        logEntriesFound++;
    }
    byteChannel.close();
    LogEntry.Start entry = logApplier.startEntry;
    if ( entry == null )
    {
        throw new IOException( "Unable to find start entry" );
    }
    // System.out.println( "applyTxWithoutTxId#before 1PC @ pos: " + writeBuffer.getFileChannelPosition() );
    // Synthesize the missing commit record locally.
    LogEntry.OnePhaseCommit commit = new LogEntry.OnePhaseCommit(
        xidIdent, nextTxId );
    LogIoUtils.writeLogEntry( commit, writeBuffer );
    Xid xid = entry.getXid();
    try
    {
        XaTransaction xaTx = xaRm.getXaTransaction( xid );
        xaTx.setCommitTxId( nextTxId );
        xaRm.commit( xid, true );
    }
    catch ( XAException e )
    {
        e.printStackTrace();
        throw new IOException( e.getMessage() );
    }
    // LogEntry.Done done = new LogEntry.Done( entry.getIdentifier() );
    // LogIoUtils.writeLogEntry( done, writeBuffer );
    // xaTf.setLastCommittedTx( nextTxId ); // done in doCommit
    log.info( "Tx[" + nextTxId + "] " + " applied successfully." );
    // System.out.println( "applyTxWithoutTxId#end @ pos: " + writeBuffer.getFileChannelPosition() );
}
public synchronized void applyTransaction( ReadableByteChannel byteChannel )
throws IOException
{
// System.out.println( "applyFullTx#start @ pos: " + writeBuffer.getFileChannelPosition() );
long logEntriesFound = 0;
LogApplier logApplier = new LogApplier( byteChannel );
int xidIdent = getNextIdentifier();
while ( logApplier.readAndApplyAndWriteEntry( xidIdent ) )
{
logEntriesFound++;
}
byteChannel.close();
// System.out.println( "applyFullTx#end @ pos: " + writeBuffer.getFileChannelPosition() );
}
    /**
     * Rotates the logical log: flushes the store, creates the other log file
     * (".1" or ".2"), writes its 16 byte header, copies over the entries of
     * all still-active transactions from the current log, switches the
     * ".active" marker file, and finally renames or deletes the old log
     * depending on {@code keepLogs}.
     * <p>
     * Statement order is significant throughout: the header verification,
     * the active-log switch and the version bump must happen in exactly this
     * sequence for crash recovery to find a consistent state.
     *
     * @throws IOException if target files already exist, the header cannot be
     *             written/verified, or the version change fails
     */
    public synchronized void rotate() throws IOException
    {
        xaTf.flushAll();
        // Default: currently on log 1, rotate to log 2
        String newLogFile = fileName + ".2";
        String currentLogFile = fileName + ".1";
        char newActiveLog = LOG2;
        long currentVersion = xaTf.getCurrentVersion();
        String oldCopy = fileName + ".v" + currentVersion;
        if ( currentLog == CLEAN || currentLog == LOG2 )
        {
            // Currently clean or on log 2 -> rotate to log 1 instead
            newActiveLog = LOG1;
            newLogFile = fileName + ".1";
            currentLogFile = fileName + ".2";
        }
        else
        {
            assert currentLog == LOG1;
        }
        if ( new File( newLogFile ).exists() )
        {
            throw new IOException( "New log file: " + newLogFile +
                " already exist" );
        }
        if ( new File( oldCopy ).exists() )
        {
            throw new IOException( "Copy log file: " + oldCopy +
                " already exist" );
        }
        // DumpLogicalLog.main( new String[] { currentLogFile } );
        long endPosition = writeBuffer.getFileChannelPosition();
        writeBuffer.force();
        FileChannel newLog = new RandomAccessFile(
            newLogFile, "rw" ).getChannel();
        // New log header: next version + last committed tx id
        buffer.clear();
        buffer.putLong( currentVersion + 1 );
        long lastTx = xaTf.getLastCommittedTx();
        buffer.putLong( lastTx ).flip();
        previousLogLastCommittedTx = lastTx;
        if ( newLog.write( buffer ) != 16 )
        {
            throw new IOException( "Unable to write log version to new" );
        }
        // Re-read the old log's header to verify we rotate the right version
        fileChannel.position( 0 );
        buffer.clear();
        buffer.limit( 16 );
        if( fileChannel.read( buffer ) != 16 )
        {
            throw new IOException( "Verification of log version failed" );
        }
        buffer.flip();
        long verification = buffer.getLong();
        if ( verification != currentVersion )
        {
            throw new IOException( "Verification of log version failed, " +
                " expected " + currentVersion + " got " + verification );
        }
        // Skip ahead to the earliest entry of a still-active transaction;
        // everything before it belongs to completed transactions
        if ( xidIdentMap.size() > 0 )
        {
            fileChannel.position( getFirstStartEntry( endPosition ) );
        }
        LogEntry entry;
        while ((entry = LogIoUtils.readEntry( buffer, fileChannel, cf )) != null )
        {
            // Copy only entries of transactions that are still active
            if ( xidIdentMap.get( entry.getIdentifier() ) != null )
            {
                if ( entry instanceof LogEntry.Start )
                {
                    // Start position now refers to the new log file
                    ((LogEntry.Start) entry).setStartPosition( newLog.position() );
                }
                LogBuffer newLogBuffer = new DirectLogBuffer( newLog, buffer );
                LogIoUtils.writeLogEntry( entry, newLogBuffer );
            }
        }
        newLog.force( false );
        releaseCurrentLogFile();
        // Flip the ".active" marker before touching the old file
        setActiveLog( newActiveLog );
        if ( keepLogs )
        {
            renameCurrentLogFileAndIncrementVersion( currentLogFile,
                endPosition );
        }
        else
        {
            deleteCurrentLogFile( currentLogFile );
            xaTf.getAndSetNewVersion();
        }
        if ( xaTf.getCurrentVersion() != ( currentVersion + 1 ) )
        {
            throw new IOException( "version change failed" );
        }
        // From here on all writes go to the new log
        fileChannel = newLog;
        if ( !useMemoryMapped )
        {
            writeBuffer = new DirectMappedLogBuffer( fileChannel );
        }
        else
        {
            writeBuffer = new MemoryMappedLogBuffer( fileChannel );
        }
    }
private long getFirstStartEntry( long endPosition )
{
long firstEntryPosition = endPosition;
for ( LogEntry.Start entry : xidIdentMap.values() )
{
if ( entry.getStartPosition() < firstEntryPosition )
{
assert entry.getStartPosition() > 0;
firstEntryPosition = entry.getStartPosition();
}
}
return firstEntryPosition;
}
private void setActiveLog( char c ) throws IOException
{
if ( c != CLEAN && c != LOG1 && c != LOG2 )
{
throw new IllegalArgumentException( "Log must be either clean, " +
"1 or 2" );
}
if ( c == currentLog )
{
throw new IllegalStateException( "Log should not be equal to " +
"current " + currentLog );
}
ByteBuffer bb = ByteBuffer.wrap( new byte[4] );
bb.asCharBuffer().put( c ).flip();
FileChannel fc = new RandomAccessFile( fileName + ".active" ,
"rw" ).getChannel();
int wrote = fc.write( bb );
if ( wrote != 4 )
{
throw new IllegalStateException( "Expected to write 4 -> " + wrote );
}
fc.force( false );
fc.close();
currentLog = c;
}
public void setKeepLogs( boolean keep )
{
this.keepLogs = keep;
}
public boolean isLogsKept()
{
return this.keepLogs;
}
public void setAutoRotateLogs( boolean autoRotate )
{
this.autoRotate = autoRotate;
}
public boolean isLogsAutoRotated()
{
return this.autoRotate;
}
public void setLogicalLogTargetSize( long size )
{
this.rotateAtSize = size;
}
public long getLogicalLogTargetSize()
{
return this.rotateAtSize;
}
public String getFileName( long version )
{
return fileName + ".v" + version;
}
}
|
package org.spongepowered.server.mixin.server;
import net.minecraft.crash.CrashReport;
import net.minecraft.network.NetworkSystem;
import net.minecraft.network.ServerStatusResponse;
import net.minecraft.network.play.server.SPacketTimeUpdate;
import net.minecraft.profiler.Profiler;
import net.minecraft.server.MinecraftServer;
import net.minecraft.server.management.PlayerList;
import net.minecraft.util.ITickable;
import net.minecraft.util.ReportedException;
import net.minecraft.util.Util;
import net.minecraft.util.text.ITextComponent;
import net.minecraft.world.WorldServer;
import org.apache.logging.log4j.Logger;
import org.spongepowered.api.event.SpongeEventFactory;
import org.spongepowered.api.event.cause.Cause;
import org.spongepowered.api.event.cause.NamedCause;
import org.spongepowered.api.world.World;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Overwrite;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.LocalCapture;
import org.spongepowered.common.SpongeImpl;
import org.spongepowered.common.interfaces.IMixinMinecraftServer;
import org.spongepowered.common.text.SpongeTexts;
import org.spongepowered.server.SpongeVanilla;
import org.spongepowered.server.world.VanillaDimensionManager;
import java.util.Hashtable;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.FutureTask;
/**
 * Mixin into the vanilla {@code MinecraftServer} for SpongeVanilla: reports
 * the server brand, logs chat with legacy colors, guards against a double
 * server stop, fires Sponge lifecycle/world events during shutdown, and
 * replaces the world-ticking loop so that all dimensions managed by
 * {@link VanillaDimensionManager} are ticked and timed.
 */
@Mixin(MinecraftServer.class)
public abstract class MixinMinecraftServer implements IMixinMinecraftServer {

    // Vanilla members made visible to this mixin
    @Shadow @Final private static Logger logger;
    @Shadow @Final private List<ITickable> playersOnline;
    @Shadow @Final public Profiler theProfiler;
    @Shadow private PlayerList playerList;
    @Shadow private int tickCounter;
    @Shadow @Final protected Queue<FutureTask<?>> futureTaskQueue;
    @Shadow public WorldServer[] worldServers;
    @Shadow public abstract boolean getAllowNether();
    @Shadow public abstract NetworkSystem getNetworkSystem();

    // Set on the first stopServer() call so a second call is cancelled
    private boolean skipServerStop;
    // Per-dimension rolling tick times (nanoseconds, ring buffer of 100),
    // keyed by dimension id; exposed via getWorldTickTimes()
    private final Hashtable<Integer, long[]> worldTickTimes = new Hashtable<>();
    // Method descriptor of the DedicatedServer constructor; not referenced in
    // this class — NOTE(review): appears unused here, confirm it is needed
    private static final String DEDICATED_SERVER_INIT = "Lnet/minecraft/server/dedicated/DedicatedServer.<init> (Ljava/io/File;Lnet/minecraft/util/datafix/DataFixer;Lcom/mojang/authlib/yggdrasil/YggdrasilAuthenticationService;Lcom/mojang/authlib/minecraft/MinecraftSessionService;Lcom/mojang/authlib/GameProfileRepository;Lnet/minecraft/server/management/PlayerProfileCache;)V";

    /**
     * @author Minecrell
     * @reason Sets the server brand name to SpongeVanilla
     */
    @Overwrite
    public String getServerModName() {
        return SpongeVanilla.INSTANCE.getName();
    }

    /**
     * @author Minecrell
     * @reason Logs chat messages with legacy color codes to show colored
     *         messages in the console
     */
    @Overwrite
    public void addChatMessage(ITextComponent component) {
        logger.info(SpongeTexts.toLegacy(component));
    }

    /** Exposes the per-dimension tick-time table (see IMixinMinecraftServer). */
    @Override
    public Hashtable<Integer, long[]> getWorldTickTimes() {
        return this.worldTickTimes;
    }

    // Cancels stopServer() on any call after the first one
    @Inject(method = "stopServer()V", at = @At("HEAD"), cancellable = true)
    private void preventDoubleStop(CallbackInfo ci) {
        if (this.skipServerStop) {
            ci.cancel();
        } else {
            // Prevent the server from stopping twice
            this.skipServerStop = true;
        }
    }

    // Fires the SpongeVanilla "server stopping" hook right after the first
    // logger.info(...) call inside stopServer()
    @Inject(method = "stopServer", at = @At(value = "INVOKE", target = "Lorg/apache/logging/log4j/Logger;info(Ljava/lang/String;)V", ordinal = 0,
            shift = At.Shift.AFTER, remap = false))
    private void callServerStopping(CallbackInfo ci) {
        SpongeVanilla.INSTANCE.onServerStopping();
    }

    // Skips favicon loading when the status response already has one
    @Inject(method = "applyServerIconToResponse", at = @At("HEAD"), cancellable = true)
    private void onAddFaviconToStatusResponse(ServerStatusResponse response, CallbackInfo ci) {
        // Don't load favicon twice
        if (response.getFavicon() != null) {
            ci.cancel();
        }
    }

    // Posts an UnloadWorldEvent for each world as it is flushed during
    // stopServer(); the captured local 'i' is the vanilla loop index into
    // worldServers
    @Inject(method = "stopServer", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/WorldServer;flush()V"),
            locals = LocalCapture.CAPTURE_FAILHARD)
    private void callWorldUnload(CallbackInfo ci, int i) {
        SpongeImpl.postEvent(SpongeEventFactory.createUnloadWorldEvent(Cause.of(NamedCause.source(this)), (World) this.worldServers[i]));
    }

    /**
     * @author Zidane
     * @reason Handles ticking the additional worlds loaded by Sponge.
     */
    @Overwrite
    public void updateTimeLightAndEntities() {
        // Drain scheduled main-thread tasks first
        this.theProfiler.startSection("jobs");
        synchronized (this.futureTaskQueue) {
            while (!this.futureTaskQueue.isEmpty()) {
                Util.runTask(this.futureTaskQueue.poll(), logger);
            }
        }
        this.theProfiler.endStartSection("levels");
        // Sponge start - Iterate over all our dimensions
        // (every 200 ticks the id list is refreshed, per the boolean argument)
        Integer[] ids = VanillaDimensionManager.getIDs(this.tickCounter % 200 == 0);
        for (int j = 0; j < ids.length; ++j) {
            int id = ids[j];
            // Sponge end
            long i = System.nanoTime();
            // Dimension 0 always ticks; others only when the nether is allowed
            if (j == 0 || this.getAllowNether()) {
                // Sponge start - Get world from our dimension manager
                WorldServer worldserver = VanillaDimensionManager.getWorldFromDimId(id);
                // Sponge end
                this.theProfiler.startSection(worldserver.getWorldInfo().getWorldName());
                // Every 20 ticks, sync world time to all players in this dimension
                if (this.tickCounter % 20 == 0) {
                    this.theProfiler.startSection("timeSync");
                    this.playerList.sendPacketToAllPlayersInDimension (
                            new SPacketTimeUpdate(worldserver.getTotalWorldTime(), worldserver.getWorldTime(),
                                    worldserver.getGameRules().getBoolean("doDaylightCycle")), worldserver.provider.getDimensionType().getId());
                    this.theProfiler.endSection();
                }
                this.theProfiler.startSection("tick");
                try {
                    worldserver.tick();
                } catch (Throwable throwable1) {
                    CrashReport crashreport = CrashReport.makeCrashReport(throwable1, "Exception ticking world");
                    worldserver.addWorldInfoToCrashReport(crashreport);
                    throw new ReportedException(crashreport);
                }
                try {
                    worldserver.updateEntities();
                } catch (Throwable throwable) {
                    CrashReport crashreport1 = CrashReport.makeCrashReport(throwable, "Exception ticking world entities");
                    worldserver.addWorldInfoToCrashReport(crashreport1);
                    throw new ReportedException(crashreport1);
                }
                this.theProfiler.endSection();
                this.theProfiler.startSection("tracker");
                worldserver.getEntityTracker().updateTrackedEntities();
                this.theProfiler.endSection();
                this.theProfiler.endSection();
            }
            // Sponge start - Write tick times to our custom map
            // NOTE(review): assumes worldTickTimes already contains an entry
            // for every returned id (populated elsewhere) — confirm, otherwise
            // get(id) would NPE here
            this.worldTickTimes.get(id)[this.tickCounter % 100] = System.nanoTime() - i;
            // Sponge end
        }
        // Sponge start - Unload requested worlds
        this.theProfiler.endStartSection("dim_unloading");
        VanillaDimensionManager.unloadWorlds(this.worldTickTimes);
        // Sponge end
        this.theProfiler.endStartSection("connection");
        this.getNetworkSystem().networkTick();
        this.theProfiler.endStartSection("players");
        this.playerList.onTick();
        this.theProfiler.endStartSection("tickables");
        for (int k = 0; k < this.playersOnline.size(); ++k) {
            this.playersOnline.get(k).update();
        }
        this.theProfiler.endSection();
    }
}
|
package org.squiddev.cctweaks.core.integration.multipart;
import codechicken.lib.data.MCDataInput;
import codechicken.lib.data.MCDataOutput;
import codechicken.lib.raytracer.IndexedCuboid6;
import codechicken.lib.render.TextureUtils;
import codechicken.lib.vec.Cuboid6;
import codechicken.lib.vec.Vector3;
import codechicken.multipart.TSlottedPart;
import com.google.common.base.Objects;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import dan200.computercraft.ComputerCraft;
import dan200.computercraft.api.peripheral.IPeripheral;
import dan200.computercraft.client.render.FixedRenderBlocks;
import dan200.computercraft.shared.peripheral.PeripheralType;
import dan200.computercraft.shared.peripheral.common.IPeripheralTile;
import dan200.computercraft.shared.peripheral.common.PeripheralItemFactory;
import dan200.computercraft.shared.peripheral.modem.TileCable;
import dan200.computercraft.shared.util.IDAssigner;
import dan200.computercraft.shared.util.PeripheralUtil;
import net.minecraft.block.Block;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.*;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.util.ForgeDirection;
import org.squiddev.cctweaks.CCTweaks;
import org.squiddev.cctweaks.api.network.INetworkNode;
import org.squiddev.cctweaks.api.network.NetworkHelpers;
import org.squiddev.cctweaks.api.network.NetworkVisitor;
import org.squiddev.cctweaks.api.network.Packet;
import org.squiddev.cctweaks.core.network.SinglePeripheralModem;
import org.squiddev.cctweaks.core.utils.ComputerAccessor;
import org.squiddev.cctweaks.core.utils.DebugLogger;
import java.io.File;
import java.lang.reflect.Field;
import java.util.Collections;
import java.util.Map;
/**
 * A ComputerCraft wired modem implemented as a Forge Multipart slotted part.
 * Occupies one face slot of a multipart tile, participates in the CCTweaks
 * cable network as an {@link INetworkNode}, and exposes the peripheral on the
 * block it faces via its embedded {@link WiredModem}.
 */
public class ModemPart extends AbstractPart implements INetworkNode, IPeripheralTile, TSlottedPart {
    // Lazily-populated cache of TileCable's modem icons (see ModemRenderer#getIcons)
    private static IIcon[] icons;

    public static final String NAME = CCTweaks.NAME + ":networkModem";

    // Face this modem is mounted on (also the multipart slot index)
    protected byte direction = 0;
    protected WiredModem modem = new WiredModem();

    public ModemPart() {
    }

    public ModemPart(int direction) {
        this.direction = (byte) direction;
    }

    /**
     * Builds a part from an existing vanilla-CC {@code TileCable} modem,
     * copying its facing, peripheral id (via reflection) and animation state.
     * Failures are logged and leave the defaults in place.
     */
    public ModemPart(TileCable modem) {
        this.direction = (byte) modem.getDirection();
        try {
            this.modem.id = ComputerAccessor.cablePeripheralId.getInt(modem);
            this.modem.setState((byte) modem.getAnim());
        } catch (Exception e) {
            DebugLogger.error("Cannot get modem from tile");
            e.printStackTrace();
        }
    }

    // Client-side renderer, created on first use
    @SideOnly(Side.CLIENT)
    private ModemRenderer render;

    // Lazily creates and caches the client renderer
    @SideOnly(Side.CLIENT)
    public ModemRenderer getRender() {
        ModemRenderer draw = render;
        if (draw == null) {
            draw = render = new ModemRenderer();
        }
        return draw;
    }

    // Lock object returned from lock(); used by the network code
    private final Object lock = new Object();

    @Override
    public String getType() {
        return NAME;
    }

    // One slot per face: the bit for the mounted direction
    @Override
    public int getSlotMask() {
        return 1 << direction;
    }

    /**
     * Occlusion box for each facing: a 0.125-thick slab against the mounted
     * face (0=down, 1=up, 2=north, 3=south, 4=west, 5=east — Forge ordering).
     */
    @Override
    public Iterable<Cuboid6> getOcclusionBoxes() {
        Cuboid6 box;
        switch (direction) {
            case 0:
            default:
                box = new Cuboid6(0.125D, 0.0D, 0.125D, 0.875D, 0.125D, 0.875D);
                break;
            case 1:
                box = new Cuboid6(0.125D, 0.875D, 0.125D, 0.875D, 1.0D, 0.875D);
                break;
            case 2:
                box = new Cuboid6(0.125D, 0.125D, 0.0D, 0.875D, 0.875D, 0.125D);
                break;
            case 3:
                box = new Cuboid6(0.125D, 0.125D, 0.875D, 0.875D, 0.875D, 1.0D);
                break;
            case 4:
                box = new Cuboid6(0.0D, 0.125D, 0.125D, 0.125D, 0.875D, 0.875D);
                break;
            case 5:
                box = new Cuboid6(0.875D, 0.125D, 0.125D, 1.0D, 0.875D, 0.875D);
                break;
        }
        return Collections.singletonList(box);
    }

    // Single sub-part hit box, indexed by the mounted direction
    @Override
    public Iterable<IndexedCuboid6> getSubParts() {
        return Collections.singletonList(new IndexedCuboid6(direction, getBounds()));
    }

    /**
     * Selection/render bounds: slightly thicker (0.1875) than the occlusion
     * box, one variant per mounted face.
     */
    @Override
    public Cuboid6 getBounds() {
        switch (direction) {
            case 0:
            default:
                return new Cuboid6(0.125D, 0.0D, 0.125D, 0.875D, 0.1875D, 0.875D);
            case 1:
                return new Cuboid6(0.125D, 0.8125D, 0.125D, 0.875D, 1.0D, 0.875D);
            case 2:
                return new Cuboid6(0.125D, 0.125D, 0.0D, 0.875D, 0.875D, 0.1875D);
            case 3:
                return new Cuboid6(0.125D, 0.125D, 0.8125D, 0.875D, 0.875D, 1.0D);
            case 4:
                return new Cuboid6(0.0D, 0.125D, 0.125D, 0.1875D, 0.875D, 0.875D);
            case 5:
                return new Cuboid6(0.8125D, 0.125D, 0.125D, 1.0D, 0.875D, 0.875D);
        }
    }

    @Override
    public Cuboid6 getRenderBounds() {
        return getBounds();
    }

    /**
     * Captures the coordinates before the part is removed by super.harvest,
     * then notifies the cable network of the change (server side only).
     */
    @Override
    public void harvest(MovingObjectPosition hit, EntityPlayer player) {
        World world = world();
        int x = x(), y = y(), z = z();
        super.harvest(hit, player);
        if (!world.isRemote) {
            NetworkHelpers.fireNetworkChanged(world, x, y, z);
        }
    }

    // Breaking particles reuse the CC cable block texture
    @Override
    @SideOnly(Side.CLIENT)
    public IIcon getBrokenIcon(int side) {
        return ComputerCraft.Blocks.cable.getIcon(0, 0);
    }

    @Override
    @SideOnly(Side.CLIENT)
    public boolean renderStatic(Vector3 pos, int pass) {
        TextureUtils.bindAtlas(0);
        getRender().drawTile(world(), x(), y(), z());
        return true;
    }

    // Pick-block yields a vanilla CC wired modem item
    @Override
    public ItemStack pickItem(MovingObjectPosition hit) {
        return PeripheralItemFactory.create(PeripheralType.WiredModem, null, 1);
    }

    /**
     * Server-side tick: syncs visual state when the modem reports a change,
     * processes the queued modem work, and (re)scans for peripherals if the
     * current set is not known.
     */
    @Override
    public void update() {
        if (world().isRemote) return;
        if (modem.modem.pollChanged()) markDirty();
        modem.processQueue(tile());
        if (!modem.peripheralsKnown) modem.findPeripherals(tile());
    }

    /**
     * Right-click (not sneaking) toggles whether the attached peripheral is
     * enabled, tells the player what connected/disconnected, and invalidates
     * the network so the change propagates.
     */
    @Override
    public boolean activate(EntityPlayer player, MovingObjectPosition hit, ItemStack item) {
        if (player.isSneaking()) return false;
        if (world().isRemote) return true;
        String name = modem.getPeripheralName();
        modem.toggleEnabled();
        String newName = modem.getPeripheralName();
        if (!Objects.equal(name, newName)) {
            if (name != null) {
                player.addChatMessage(new ChatComponentTranslation("gui.computercraft:wired_modem.peripheral_disconnected", name));
            }
            if (newName != null) {
                player.addChatMessage(new ChatComponentTranslation("gui.computercraft:wired_modem.peripheral_connected", newName));
            }
            NetworkHelpers.fireNetworkInvalidate(world(), x(), y(), z());
            markDirty();
        }
        return true;
    }

    /**
     * Marks the modem as dirty to trigger a block update and client sync
     */
    public void markDirty() {
        modem.refreshState();
        tile().notifyPartChange(this);
        tile().markDirty();
        sendDescUpdate();
    }

    // Client sync: direction byte followed by the modem state byte
    @Override
    public void writeDesc(MCDataOutput packet) {
        packet.writeByte(direction);
        packet.writeByte(modem.state);
    }

    // Must read in the same order as writeDesc
    @Override
    public void readDesc(MCDataInput packet) {
        direction = packet.readByte();
        modem.setState(packet.readByte());
    }

    // NBT persistence: direction, visual state and peripheral id
    @Override
    public void save(NBTTagCompound tag) {
        tag.setByte("modem_direction", direction);
        tag.setByte("modem_state", modem.state);
        tag.setInteger("modem_id", modem.id);
    }

    @Override
    public void load(NBTTagCompound tag) {
        direction = tag.getByte("modem_direction");
        modem.setState(tag.getByte("modem_state"));
        modem.id = tag.getInteger("modem_id");
    }

    // Network traversal may enter and leave this node from any side
    @Override
    public boolean canBeVisited(ForgeDirection from) {
        return true;
    }

    @Override
    public boolean canVisitTo(ForgeDirection to) {
        return true;
    }

    @Override
    public Map<String, IPeripheral> getConnectedPeripherals() {
        return modem.getConnectedPeripherals();
    }

    @Override
    public void receivePacket(Packet packet, int distanceTravelled) {
        modem.receivePacket(packet, distanceTravelled);
    }

    @Override
    public void invalidateNetwork() {
        modem.invalidateNetwork();
    }

    // Server side only: re-invalidate the surrounding network and let the
    // modem react to the topology change
    @Override
    public void networkChanged() {
        if (!world().isRemote) {
            NetworkHelpers.fireNetworkInvalidate(world(), x(), y(), z());
            modem.networkChanged();
        }
    }

    // No extra network search locations beyond the part's own position
    @Override
    public Iterable<NetworkVisitor.SearchLoc> getExtraNodes() {
        return null;
    }

    @Override
    public Object lock() {
        return lock;
    }

    @Override
    public int getDirection() {
        return direction;
    }

    // Direction is fixed at placement; IPeripheralTile setter is a no-op
    @Override
    public void setDirection(int direction) {
    }

    @Override
    public PeripheralType getPeripheralType() {
        return PeripheralType.WiredModem;
    }

    // Only the mounted face exposes the modem peripheral
    @Override
    public IPeripheral getPeripheral(int side) {
        if (side == direction) return modem.modem;
        return null;
    }

    @Override
    public String getLabel() {
        return null;
    }

    /**
     * Client renderer drawing this part with ComputerCraft's cable block
     * textures, using the modem's state to pick lit/unlit variants.
     */
    public class ModemRenderer extends FixedRenderBlocks {
        /**
         * Lazily fetches TileCable's private static "s_modemIcons" array via
         * reflection, caching it in {@link ModemPart#icons}. Falls back to an
         * empty array of 8 (and logs) if reflection fails.
         */
        public IIcon[] getIcons() {
            IIcon[] icons;
            if ((icons = ModemPart.icons) == null) {
                try {
                    Field field = TileCable.class.getDeclaredField("s_modemIcons");
                    field.setAccessible(true);
                    icons = (IIcon[]) field.get(null);
                } catch (ReflectiveOperationException e) {
                    DebugLogger.error("Cannot find TileCable texture");
                    e.printStackTrace();
                    icons = new IIcon[8];
                }
                ModemPart.icons = icons;
            }
            return icons;
        }

        // Picks the icon for a face: state selects the texture pair, the
        // face relative to the modem's direction selects light/dark variant
        @Override
        public IIcon getBlockIcon(Block block, IBlockAccess world, int x, int y, int z, int side) {
            int dir = direction;
            int texture = modem.state * 2;
            IIcon[] icons = getIcons();
            if (side == dir) {
                // Use the dark side for the peripheral side
                return icons[texture + 1];
            } else if (dir == 0 || dir == 1 || side == Facing.oppositeSide[dir]) {
                // Use the cable texture for the cable side or if the modem
                // is facing up/down to prevent textures being on the wrong side
                return icons[texture];
            } else if (side == 2 || side == 5) {
                // If the side is north/east use the side texture to prevent
                // the dark line being on the wrong side
                return icons[texture + 1];
            }
            return icons[texture];
        }

        // Renders the part as a standard block constrained to getBounds()
        public void drawTile(IBlockAccess world, int x, int y, int z) {
            setWorld(world);
            Block block = ComputerCraft.Blocks.cable;
            Cuboid6 bounds = getBounds();
            setRenderBounds(bounds.min.x, bounds.min.y, bounds.min.z, bounds.max.x, bounds.max.y, bounds.max.z);
            renderStandardBlock(block, x, y, z);
        }
    }

    /**
     * The actual modem node: exposes the peripheral of the block directly in
     * front of the mounted face.
     */
    public class WiredModem extends SinglePeripheralModem {
        /**
         * Looks up the peripheral adjacent to the mounted face. Resets the id
         * to -1 when nothing is attached; otherwise assigns a persistent id
         * from the per-peripheral-type id file on first attachment.
         */
        @Override
        public IPeripheral getPeripheral() {
            int dir = direction;
            int x = x() + Facing.offsetsXForSide[dir];
            int y = y() + Facing.offsetsYForSide[dir];
            int z = z() + Facing.offsetsZForSide[dir];
            IPeripheral peripheral = PeripheralUtil.getPeripheral(world(), x, y, z, Facing.oppositeSide[dir]);
            if (peripheral == null) {
                id = -1;
            } else if (id <= -1) {
                id = IDAssigner.getNextIDFromFile(new File(ComputerCraft.getWorldDir(world()), "computer/lastid_" + peripheral.getType() + ".txt"));
            }
            return peripheral;
        }

        @Override
        public Vec3 getPosition() {
            return Vec3.createVectorHelper(x(), y(), z());
        }
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.