answer
stringlengths 17
10.2M
|
|---|
package ai.grakn.graql.internal.gremlin;
import ai.grakn.GraknGraph;
import ai.grakn.graql.admin.Conjunction;
import ai.grakn.graql.admin.PatternAdmin;
import ai.grakn.graql.admin.VarAdmin;
import ai.grakn.graql.internal.gremlin.fragment.Fragment;
import ai.grakn.graql.internal.query.match.MatchOrder;
import ai.grakn.util.ErrorMessage;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
import static java.util.stream.Collectors.toList;
/**
* A class for building gremlin traversals from patterns.
* <p>
* A {@code Query} is constructed from a single {@code Pattern.Conjunction}. The conjunction is transformed into
* disjunctive normal form and then an {@code InnerQuery} is constructed from each disjunction component. This allows
* each {@code InnerQuery} to be described by a single gremlin traversal.
* <p>
* The {@code Query} returns a list of gremlin traversals, whose results are combined by {@code MatchQueryImpl} to
* maintain any requested ordering.
*/
public class GremlinQuery {

    // Logger is stateless and shared; one per class, not one per instance.
    protected static final Logger LOG = LoggerFactory.getLogger(GremlinQuery.class);

    private final GraknGraph graph;
    private final Collection<ConjunctionQuery> innerQueries;
    private final ImmutableSet<String> names;
    private final Optional<MatchOrder> order;

    /**
     * @param graph the graph to execute the query on
     * @param pattern a pattern to find in the graph
     * @param names the variable names to select
     * @param order an optional ordering
     * @throws IllegalStateException if {@code graph} is null
     */
    public GremlinQuery(GraknGraph graph, PatternAdmin pattern, ImmutableSet<String> names, Optional<MatchOrder> order) {
        // Fail fast before doing any work with the pattern.
        if (graph == null) {
            throw new IllegalStateException(ErrorMessage.NO_GRAPH.getMessage());
        }
        Collection<Conjunction<VarAdmin>> patterns = pattern.getDisjunctiveNormalForm().getPatterns();
        this.graph = graph;
        this.names = names;
        this.order = order;
        // One ConjunctionQuery per disjunction component, so each can be
        // described by a single gremlin traversal.
        innerQueries = patterns.stream().map(ConjunctionQuery::new).collect(toList());
    }

    /**
     * Get a close-to-optimal traversal plan to execute this query
     */
    public GraqlTraversal optimalTraversal() {
        return GraqlTraversal.semiOptimal(graph, innerQueries);
    }

    /**
     * @return a gremlin traversal to execute to find results
     */
    public GraphTraversal<Vertex, Map<String, Vertex>> getTraversal() {
        GraqlTraversal graqlTraversal = optimalTraversal();
        LOG.debug("Created query plan");
        LOG.debug(graqlTraversal.toString());
        // Because 'union' accepts an array, we can't use generics...
        //noinspection unchecked
        GraphTraversal<Vertex, Map<String, Vertex>> traversal = graqlTraversal.getGraphTraversal();
        order.ifPresent(o -> o.orderTraversal(traversal));
        String[] namesArray = names.toArray(new String[0]);
        // Must provide three arguments in order to pass an array to .select
        // If ordering, select the variable to order by as well
        if (order.isPresent()) {
            String orderVar = order.get().getVar();
            traversal.select(orderVar, orderVar, namesArray);
        } else if (namesArray.length != 0) {
            traversal.select(namesArray[0], namesArray[0], namesArray);
        }
        return traversal;
    }

    /**
     * @return a stream of concept IDs mentioned in the query
     */
    public Stream<String> getConcepts() {
        return innerQueries.stream().flatMap(ConjunctionQuery::getConcepts);
    }

    /**
     * @return every traversal plan obtainable by combining one fragment
     * ordering from each inner (conjunction) query
     */
    public Stream<GraqlTraversal> allGraqlTraversals() {
        List<Set<List<Fragment>>> collect = innerQueries.stream().map(ConjunctionQuery::allFragmentOrders).collect(toList());
        // Cartesian product: one fragment ordering chosen per inner query.
        Set<List<List<Fragment>>> lists = Sets.cartesianProduct(collect);
        return lists.stream().map(list -> GraqlTraversal.create(graph, Sets.newHashSet(list)));
    }
}
|
package org.jnosql.artemis.graph;
import java.util.Optional;
import java.util.function.Supplier;
import java.util.stream.Stream;
import static java.util.Objects.requireNonNull;
/**
 * The Graph Traversal that maps {@link org.apache.tinkerpop.gremlin.structure.Vertex}.
 * This Traversal is lazy; in other words, it only runs once a terminal
 * operation (such as {@link #next()} or {@link #stream()}) is invoked.
 */
public interface VertexTraversal extends VertexConditionTraversal {

    /**
     * Maps this traversal to an {@link EdgeTraversal} over the outgoing
     * incident edges matching the given labels.
     *
     * @param edgeLabels the edge labels to traverse
     * @return an {@link EdgeTraversal} with the new condition
     * @throws NullPointerException when has any null element
     */
    EdgeTraversal outE(String... edgeLabels) throws NullPointerException;

    /**
     * Maps this traversal to an {@link EdgeTraversal} over the incoming
     * incident edges matching the given labels.
     *
     * @param edgeLabels the edge labels to traverse
     * @return an {@link EdgeTraversal} with the new condition
     * @throws NullPointerException when has any null element
     */
    EdgeTraversal inE(String... edgeLabels) throws NullPointerException;

    /**
     * Maps this traversal to an {@link EdgeTraversal} over both incoming and
     * outgoing incident edges matching the given labels.
     *
     * @param edgeLabels the edge labels to traverse
     * @return an {@link EdgeTraversal} with the new condition
     * @throws NullPointerException when has any null element
     */
    EdgeTraversal bothE(String... edgeLabels) throws NullPointerException;

    /**
     * Starts the loop traversal graph
     *
     * @return a {@link VertexRepeatTraversal}
     */
    VertexRepeatTraversal repeat();

    /**
     * Convenience overload of {@link #outE(String...)} that obtains the edge
     * label from a {@link Supplier}.
     *
     * @param label supplies the edge label to traverse
     * @return an {@link EdgeTraversal} with the new condition
     * @throws NullPointerException when the supplier is null
     */
    default EdgeTraversal outE(Supplier<String> label) throws NullPointerException {
        // requireNonNull returns its argument, so validate and use in one step.
        return outE(requireNonNull(label, "the supplier is required").get());
    }

    /**
     * Convenience overload of {@link #inE(String...)} that obtains the edge
     * label from a {@link Supplier}.
     *
     * @param label supplies the edge label to traverse
     * @return an {@link EdgeTraversal} with the new condition
     * @throws NullPointerException when the supplier is null
     */
    default EdgeTraversal inE(Supplier<String> label) throws NullPointerException {
        return inE(requireNonNull(label, "the supplier is required").get());
    }

    /**
     * Convenience overload of {@link #bothE(String...)} that obtains the edge
     * label from a {@link Supplier}.
     *
     * @param label supplies the edge label to traverse
     * @return an {@link EdgeTraversal} with the new condition
     * @throws NullPointerException when the supplier is null
     */
    default EdgeTraversal bothE(Supplier<String> label) throws NullPointerException {
        return bothE(requireNonNull(label, "the supplier is required").get());
    }

    /**
     * Filter the objects in the traversal by the number of them to pass through the next, where only the first
     * {@code n} objects are allowed as defined by the {@code limit} argument.
     *
     * @param limit the number at which to end the next
     * @return a {@link VertexTraversal} with the limit
     */
    VertexTraversal limit(long limit);

    /**
     * Returns the next element in the traversal, or {@link Optional#empty()}
     * when the traversal is exhausted.
     *
     * @param <T> the entity type
     * @return the entity result otherwise {@link Optional#empty()}
     */
    <T> Optional<T> next();

    /**
     * Get all the result in the traversal as Stream
     *
     * @param <T> the entity type
     * @return the entity result as {@link Stream}
     */
    <T> Stream<T> stream();

    /**
     * Get the next n-number of results from the traversal.
     *
     * @param <T> the entity type
     * @param limit the limit to result
     * @return the entity result as {@link Stream}
     */
    <T> Stream<T> next(int limit);

    /**
     * Map the {@link org.apache.tinkerpop.gremlin.structure.Element} to a {@link java.util.Map} of the properties key'd according
     * to their {@link org.apache.tinkerpop.gremlin.structure.Property#key}.
     * If no property keys are provided, then all properties are retrieved.
     *
     * @param propertyKeys the properties to retrieve
     * @return a {@link ValueMapTraversal} instance
     */
    ValueMapTraversal valueMap(final String... propertyKeys);

    /**
     * Map the traversal next to its reduction as a sum of the elements
     *
     * @return the sum
     */
    long count();
}
|
package io.bisq.gui.main.market.offerbook;
import io.bisq.common.UserThread;
import io.bisq.common.locale.CurrencyUtil;
import io.bisq.common.locale.Res;
import io.bisq.common.util.Tuple4;
import io.bisq.core.offer.Offer;
import io.bisq.core.offer.OfferPayload;
import io.bisq.gui.Navigation;
import io.bisq.gui.common.view.ActivatableViewAndModel;
import io.bisq.gui.common.view.FxmlView;
import io.bisq.gui.components.AutoTooltipButton;
import io.bisq.gui.components.AutoTooltipLabel;
import io.bisq.gui.components.AutoTooltipTableColumn;
import io.bisq.gui.main.MainView;
import io.bisq.gui.main.offer.BuyOfferView;
import io.bisq.gui.main.offer.SellOfferView;
import io.bisq.gui.main.offer.offerbook.OfferBookListItem;
import io.bisq.gui.util.BSFormatter;
import io.bisq.gui.util.CurrencyListItem;
import io.bisq.gui.util.GUIUtil;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.chart.AreaChart;
import javafx.scene.chart.NumberAxis;
import javafx.scene.chart.XYChart;
import javafx.scene.control.*;
import javafx.scene.image.ImageView;
import javafx.scene.layout.HBox;
import javafx.scene.layout.Priority;
import javafx.scene.layout.VBox;
import javafx.util.Callback;
import javafx.util.StringConverter;
import org.fxmisc.easybind.EasyBind;
import org.fxmisc.easybind.Subscription;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.Collections;
import java.util.function.Function;
@FxmlView
public class OfferBookChartView extends ActivatableViewAndModel<VBox, OfferBookChartViewModel> {
    private static final Logger log = LoggerFactory.getLogger(OfferBookChartView.class);

    private NumberAxis xAxis;
    // NOTE(review): raw type kept on purpose — the element type of the model's
    // chart data is not visible from here; parameterizing could break callers.
    private XYChart.Series seriesBuy, seriesSell;
    private final Navigation navigation;
    private final BSFormatter formatter;
    private TableView<OfferListItem> buyOfferTableView;
    private TableView<OfferListItem> sellOfferTableView;
    private AreaChart<Number, Number> areaChart;
    private ComboBox<CurrencyListItem> currencyComboBox;
    private Subscription tradeCurrencySubscriber;
    private final StringProperty volumeColumnLabel = new SimpleStringProperty();
    private final StringProperty priceColumnLabel = new SimpleStringProperty();
    private Button leftButton;
    private Button rightButton;
    private ChangeListener<Number> selectedTabIndexListener;
    private SingleSelectionModel<Tab> tabPaneSelectionModel;
    private Label leftHeaderLabel, rightHeaderLabel;
    private ChangeListener<OfferListItem> sellTableRowSelectionListener, buyTableRowSelectionListener;
    private HBox bottomHBox;
    private ListChangeListener<OfferBookListItem> changeListener;
    private ListChangeListener<CurrencyListItem> currencyListItemsListener;
    private ChangeListener<Number> bisqWindowVerticalSizeListener;

    // Derived from startup defaults: a 109px table shows 4 rows; plus padding.
    private final double pixelsPerOfferTableRow = (109.0 / 4.0) + 10.0;

    // Maps the scene height to the preferred offer-table height, growing the
    // table by whole rows once the scene exceeds the 710px startup height.
    private final Function<Double, Double> offerTableViewHeight = (screenSize) -> {
        // startup defaults: scene height = 710 tbl view height = 109 visible row count = 4
        int extraRows = screenSize <= 710 ? 0 : (int) ((screenSize - 710) / pixelsPerOfferTableRow);
        return extraRows == 0 ? 109 : Math.ceil(109 + (extraRows * pixelsPerOfferTableRow));
    };

    // Constructor, lifecycle
    @SuppressWarnings("WeakerAccess")
    @Inject
    public OfferBookChartView(OfferBookChartViewModel model, Navigation navigation, BSFormatter formatter) {
        super(model);
        this.navigation = navigation;
        this.formatter = formatter;
    }

    @Override
    public void initialize() {
        changeListener = c -> updateChartData();
        currencyListItemsListener = c -> {
            if (model.getSelectedCurrencyListItem().isPresent())
                currencyComboBox.getSelectionModel().select(model.getSelectedCurrencyListItem().get());
        };
        currencyComboBox = new ComboBox<>();
        currencyComboBox.setPromptText(Res.get("list.currency.select"));
        currencyComboBox.setConverter(GUIUtil.getCurrencyListItemConverter(Res.get("shared.oneOffer"),
                Res.get("shared.multipleOffers"),
                model.preferences));
        Label currencyLabel = new AutoTooltipLabel(Res.getWithCol("shared.currency"));
        HBox currencyHBox = new HBox();
        currencyHBox.setSpacing(5);
        currencyHBox.setPadding(new Insets(5, -20, -5, 20));
        currencyHBox.setAlignment(Pos.CENTER_LEFT);
        currencyHBox.getChildren().addAll(currencyLabel, currencyComboBox);
        createChart();
        Tuple4<TableView<OfferListItem>, VBox, Button, Label> tupleBuy = getOfferTable(OfferPayload.Direction.BUY);
        Tuple4<TableView<OfferListItem>, VBox, Button, Label> tupleSell = getOfferTable(OfferPayload.Direction.SELL);
        buyOfferTableView = tupleBuy.first;
        sellOfferTableView = tupleSell.first;
        leftButton = tupleBuy.third;
        rightButton = tupleSell.third;
        leftHeaderLabel = tupleBuy.forth;
        rightHeaderLabel = tupleSell.forth;
        bottomHBox = new HBox();
        bottomHBox.setSpacing(20);
        bottomHBox.setAlignment(Pos.CENTER);
        HBox.setHgrow(tupleBuy.second, Priority.ALWAYS);
        HBox.setHgrow(tupleSell.second, Priority.ALWAYS);
        // User data tags which direction each table box shows, so activate()
        // can detect and flip the layout for crypto currencies.
        tupleBuy.second.setUserData(OfferPayload.Direction.BUY.name());
        tupleSell.second.setUserData(OfferPayload.Direction.SELL.name());
        bottomHBox.getChildren().addAll(tupleBuy.second, tupleSell.second);
        root.getChildren().addAll(currencyHBox, areaChart, bottomHBox);
    }

    @Override
    protected void activate() {
        // root.getParent() is null at initialize
        tabPaneSelectionModel = GUIUtil.getParentOfType(root, TabPane.class).getSelectionModel();
        selectedTabIndexListener = (observable, oldValue, newValue) -> model.setSelectedTabIndex((int) newValue);
        model.setSelectedTabIndex(tabPaneSelectionModel.getSelectedIndex());
        tabPaneSelectionModel.selectedIndexProperty().addListener(selectedTabIndexListener);
        currencyComboBox.setItems(model.getCurrencyListItems());
        currencyComboBox.setVisibleRowCount(25);
        if (model.getSelectedCurrencyListItem().isPresent())
            currencyComboBox.getSelectionModel().select(model.getSelectedCurrencyListItem().get());
        currencyComboBox.setOnAction(e -> {
            CurrencyListItem selectedItem = currencyComboBox.getSelectionModel().getSelectedItem();
            if (selectedItem != null) {
                model.onSetTradeCurrency(selectedItem.tradeCurrency);
                updateChartData();
            }
        });
        model.currencyListItems.addListener(currencyListItemsListener);
        model.getOfferBookListItems().addListener(changeListener);
        // Re-label the chart, axes, tables and buttons whenever the selected
        // trade currency changes. For crypto currencies buy/sell sides are
        // swapped relative to fiat, so the table order may need reversing.
        tradeCurrencySubscriber = EasyBind.subscribe(model.selectedTradeCurrencyProperty,
                tradeCurrency -> {
                    String code = tradeCurrency.getCode();
                    areaChart.setTitle(Res.get("market.offerBook.chart.title", formatter.getCurrencyNameAndCurrencyPair(code)));
                    volumeColumnLabel.set(Res.get("shared.amountWithCur", code));
                    xAxis.setTickLabelFormatter(new StringConverter<Number>() {
                        @Override
                        public String toString(Number object) {
                            final double doubleValue = (double) object;
                            if (CurrencyUtil.isCryptoCurrency(model.getCurrencyCode())) {
                                // Altcoin prices can be tiny; fall back to 8
                                // decimals when 3 would render as zero.
                                final String withPrecision3 = formatter.formatRoundedDoubleWithPrecision(doubleValue, 3);
                                if (withPrecision3.equals("0.000"))
                                    return formatter.formatRoundedDoubleWithPrecision(doubleValue, 8);
                                else
                                    return withPrecision3;
                            } else {
                                return formatter.formatRoundedDoubleWithPrecision(doubleValue, 2);
                            }
                        }

                        @Override
                        public Number fromString(String string) {
                            return null;
                        }
                    });
                    if (CurrencyUtil.isCryptoCurrency(code)) {
                        if (bottomHBox.getChildren().size() == 2 && bottomHBox.getChildren().get(0).getUserData().equals(OfferPayload.Direction.BUY.name())) {
                            bottomHBox.getChildren().get(0).toFront();
                            reverseTableColumns();
                        }
                        leftHeaderLabel.setText(Res.get("market.offerBook.buyOffersHeaderLabel", code));
                        leftButton.setText(Res.get("market.offerBook.buyAltcoin", code, Res.getBaseCurrencyCode()));
                        rightHeaderLabel.setText(Res.get("market.offerBook.sellOffersHeaderLabel", code));
                        rightButton.setText(Res.get("market.offerBook.sellAltcoin", code, Res.getBaseCurrencyCode()));
                        priceColumnLabel.set(Res.get("shared.priceWithCur", Res.getBaseCurrencyCode()));
                    } else {
                        if (bottomHBox.getChildren().size() == 2 && bottomHBox.getChildren().get(0).getUserData().equals(OfferPayload.Direction.SELL.name())) {
                            bottomHBox.getChildren().get(0).toFront();
                            reverseTableColumns();
                        }
                        leftHeaderLabel.setText(Res.get("market.offerBook.sellOffersHeaderLabel", Res.getBaseCurrencyCode()));
                        leftButton.setText(Res.get("market.offerBook.sellWithFiat", Res.getBaseCurrencyCode(), code));
                        rightHeaderLabel.setText(Res.get("market.offerBook.buyOffersHeaderLabel", Res.getBaseCurrencyCode()));
                        rightButton.setText(Res.get("market.offerBook.buyWithFiat", Res.getBaseCurrencyCode(), code));
                        priceColumnLabel.set(Res.get("shared.priceWithCur", code));
                    }
                    xAxis.setLabel(formatter.getPriceWithCurrencyCode(code));
                    seriesBuy.setName(leftHeaderLabel.getText() + " ");
                    seriesSell.setName(rightHeaderLabel.getText());
                });
        buyOfferTableView.setItems(model.getTopBuyOfferList());
        sellOfferTableView.setItems(model.getTopSellOfferList());
        // Selecting a row in either table navigates to the opposite-direction
        // offer screen (a buy offer is taken from the sell screen and vice versa).
        buyTableRowSelectionListener = (observable, oldValue, newValue) -> {
            model.preferences.setSellScreenCurrencyCode(model.getCurrencyCode());
            //noinspection unchecked
            navigation.navigateTo(MainView.class, SellOfferView.class);
        };
        sellTableRowSelectionListener = (observable, oldValue, newValue) -> {
            model.preferences.setBuyScreenCurrencyCode(model.getCurrencyCode());
            //noinspection unchecked
            navigation.navigateTo(MainView.class, BuyOfferView.class);
        };
        buyOfferTableView.getSelectionModel().selectedItemProperty().addListener(buyTableRowSelectionListener);
        sellOfferTableView.getSelectionModel().selectedItemProperty().addListener(sellTableRowSelectionListener);
        bisqWindowVerticalSizeListener = (observable, oldValue, newValue) -> {
            double newTableViewHeight = offerTableViewHeight.apply(newValue.doubleValue());
            if (buyOfferTableView.getHeight() != newTableViewHeight) {
                buyOfferTableView.setMinHeight(newTableViewHeight);
                sellOfferTableView.setMinHeight(newTableViewHeight);
            }
        };
        root.getScene().heightProperty().addListener(bisqWindowVerticalSizeListener);
        updateChartData();
    }

    @Override
    protected void deactivate() {
        model.getOfferBookListItems().removeListener(changeListener);
        tabPaneSelectionModel.selectedIndexProperty().removeListener(selectedTabIndexListener);
        model.currencyListItems.removeListener(currencyListItemsListener);
        tradeCurrencySubscriber.unsubscribe();
        currencyComboBox.setOnAction(null);
        buyOfferTableView.getSelectionModel().selectedItemProperty().removeListener(buyTableRowSelectionListener);
        sellOfferTableView.getSelectionModel().selectedItemProperty().removeListener(sellTableRowSelectionListener);
        // Fix: the window-height listener added in activate() was previously
        // never removed, leaking one listener per activate/deactivate cycle.
        if (root.getScene() != null)
            root.getScene().heightProperty().removeListener(bisqWindowVerticalSizeListener);
    }

    /** Builds the price/depth area chart and its axes. */
    private void createChart() {
        xAxis = new NumberAxis();
        xAxis.setForceZeroInRange(false);
        xAxis.setAutoRanging(true);
        NumberAxis yAxis = new NumberAxis();
        yAxis.setForceZeroInRange(false);
        yAxis.setAutoRanging(true);
        yAxis.setLabel(Res.get("shared.amountWithCur", Res.getBaseCurrencyCode()));
        yAxis.setTickLabelFormatter(new NumberAxis.DefaultFormatter(yAxis, "", ""));
        seriesBuy = new XYChart.Series<>();
        seriesSell = new XYChart.Series<>();
        areaChart = new AreaChart<>(xAxis, yAxis);
        areaChart.setLegendVisible(false);
        areaChart.setAnimated(false);
        areaChart.setId("charts");
        areaChart.setMinHeight(300);
        areaChart.setPrefHeight(300);
        areaChart.setPadding(new Insets(0, 30, 0, 0));
        areaChart.getData().addAll(seriesBuy, seriesSell);
    }

    /** Replaces both chart series with the model's current buy/sell data. */
    private void updateChartData() {
        seriesBuy.getData().clear();
        seriesSell.getData().clear();
        //noinspection unchecked
        seriesBuy.getData().addAll(model.getBuyData());
        //noinspection unchecked
        seriesSell.getData().addAll(model.getSellData());
    }

    /**
     * Builds one offer table (price, volume, amount columns) with its title
     * label and take-offer button for the given direction.
     *
     * @param direction the offer direction this table displays
     * @return tuple of (table, containing VBox, take-offer button, title label)
     */
    private Tuple4<TableView<OfferListItem>, VBox, Button, Label> getOfferTable(OfferPayload.Direction direction) {
        TableView<OfferListItem> tableView = new TableView<>();
        tableView.setMinHeight(109);
        tableView.setPrefHeight(121);
        tableView.setMinWidth(480);

        // price
        TableColumn<OfferListItem, OfferListItem> priceColumn = new TableColumn<>();
        priceColumn.textProperty().bind(priceColumnLabel);
        priceColumn.setMinWidth(115);
        priceColumn.setMaxWidth(115);
        priceColumn.setSortable(false);
        priceColumn.setCellValueFactory((offer) -> new ReadOnlyObjectWrapper<>(offer.getValue()));
        priceColumn.setCellFactory(
                new Callback<TableColumn<OfferListItem, OfferListItem>, TableCell<OfferListItem, OfferListItem>>() {
                    @Override
                    public TableCell<OfferListItem, OfferListItem> call(TableColumn<OfferListItem, OfferListItem> column) {
                        return new TableCell<OfferListItem, OfferListItem>() {
                            private Offer offer;
                            // Shows "n/a" until the price feed delivers a price,
                            // then renders it and detaches itself.
                            final ChangeListener<Number> listener = new ChangeListener<Number>() {
                                @Override
                                public void changed(ObservableValue<? extends Number> observable, Number oldValue, Number newValue) {
                                    if (offer != null && offer.getPrice() != null) {
                                        setText(formatter.formatPrice(offer.getPrice()));
                                        model.priceFeedService.updateCounterProperty().removeListener(listener);
                                    }
                                }
                            };

                            @Override
                            public void updateItem(final OfferListItem offerListItem, boolean empty) {
                                super.updateItem(offerListItem, empty);
                                if (offerListItem != null && !empty) {
                                    if (offerListItem.offer.getPrice() == null) {
                                        this.offer = offerListItem.offer;
                                        model.priceFeedService.updateCounterProperty().addListener(listener);
                                        setText(Res.get("shared.na"));
                                    } else {
                                        setText(formatter.formatPrice(offerListItem.offer.getPrice()));
                                    }
                                } else {
                                    // listener is a final field, never null; removing
                                    // an unregistered listener is a harmless no-op.
                                    model.priceFeedService.updateCounterProperty().removeListener(listener);
                                    this.offer = null;
                                    setText("");
                                }
                            }
                        };
                    }
                });

        // volume
        TableColumn<OfferListItem, OfferListItem> volumeColumn = new TableColumn<>();
        volumeColumn.setMinWidth(115);
        volumeColumn.setSortable(false);
        volumeColumn.textProperty().bind(volumeColumnLabel);
        volumeColumn.setCellValueFactory((offer) -> new ReadOnlyObjectWrapper<>(offer.getValue()));
        volumeColumn.setCellFactory(
                new Callback<TableColumn<OfferListItem, OfferListItem>, TableCell<OfferListItem, OfferListItem>>() {
                    @Override
                    public TableCell<OfferListItem, OfferListItem> call(TableColumn<OfferListItem, OfferListItem> column) {
                        return new TableCell<OfferListItem, OfferListItem>() {
                            private Offer offer;
                            final ChangeListener<Number> listener = new ChangeListener<Number>() {
                                @Override
                                public void changed(ObservableValue<? extends Number> observable, Number oldValue, Number newValue) {
                                    if (offer != null && offer.getPrice() != null) {
                                        setText(formatter.formatVolume(offer.getVolume()));
                                        model.priceFeedService.updateCounterProperty().removeListener(listener);
                                    }
                                }
                            };

                            @Override
                            public void updateItem(final OfferListItem offerListItem, boolean empty) {
                                super.updateItem(offerListItem, empty);
                                if (offerListItem != null && !empty) {
                                    this.offer = offerListItem.offer;
                                    if (offer.getPrice() == null) {
                                        model.priceFeedService.updateCounterProperty().addListener(listener);
                                        setText(Res.get("shared.na"));
                                    } else {
                                        setText(formatter.formatVolume(offer.getVolume()));
                                    }
                                } else {
                                    model.priceFeedService.updateCounterProperty().removeListener(listener);
                                    this.offer = null;
                                    setText("");
                                }
                            }
                        };
                    }
                });

        // amount
        TableColumn<OfferListItem, OfferListItem> amountColumn = new AutoTooltipTableColumn<>(Res.get("shared.amountWithCur", Res.getBaseCurrencyCode()));
        amountColumn.setMinWidth(115);
        amountColumn.setSortable(false);
        amountColumn.setCellValueFactory((offer) -> new ReadOnlyObjectWrapper<>(offer.getValue()));
        amountColumn.setCellFactory(
                new Callback<TableColumn<OfferListItem, OfferListItem>, TableCell<OfferListItem, OfferListItem>>() {
                    @Override
                    public TableCell<OfferListItem, OfferListItem> call(TableColumn<OfferListItem, OfferListItem> column) {
                        return new TableCell<OfferListItem, OfferListItem>() {
                            @Override
                            public void updateItem(final OfferListItem offerListItem, boolean empty) {
                                super.updateItem(offerListItem, empty);
                                if (offerListItem != null && !empty)
                                    setText(formatter.formatCoin(offerListItem.offer.getAmount()));
                                else
                                    setText("");
                            }
                        };
                    }
                });

        // The accumulated-sum column was removed: not really relevant and it
        // seemed to be confusing to some users.

        tableView.getColumns().add(volumeColumn);
        tableView.getColumns().add(amountColumn);
        tableView.getColumns().add(priceColumn);
        tableView.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY);
        Label placeholder = new AutoTooltipLabel(Res.get("table.placeholder.noItems", Res.get("shared.multipleOffers")));
        placeholder.setWrapText(true);
        tableView.setPlaceholder(placeholder);
        Label titleLabel = new AutoTooltipLabel();
        titleLabel.getStyleClass().add("table-title");
        UserThread.execute(() -> titleLabel.prefWidthProperty().bind(tableView.widthProperty()));
        boolean isSellOffer = direction == OfferPayload.Direction.SELL;
        Button button = new AutoTooltipButton();
        ImageView iconView = new ImageView();
        iconView.setId(isSellOffer ? "image-buy-white" : "image-sell-white");
        button.setGraphic(iconView);
        button.setGraphicTextGap(10);
        button.setText(isSellOffer ? Res.get("market.offerBook.buy") : Res.get("market.offerBook.sell"));
        button.setMinHeight(40);
        button.setId(isSellOffer ? "buy-button-big" : "sell-button-big");
        button.setOnAction(e -> {
            if (isSellOffer) {
                model.preferences.setBuyScreenCurrencyCode(model.getCurrencyCode());
                //noinspection unchecked
                navigation.navigateTo(MainView.class, BuyOfferView.class);
            } else {
                model.preferences.setSellScreenCurrencyCode(model.getCurrencyCode());
                //noinspection unchecked
                navigation.navigateTo(MainView.class, SellOfferView.class);
            }
        });
        VBox vBox = new VBox();
        vBox.setSpacing(10);
        vBox.setFillWidth(true);
        vBox.setMinHeight(190);
        // Fix: setVgrow is static; call it through the class, not an instance.
        VBox.setVgrow(tableView, Priority.ALWAYS);
        vBox.getChildren().addAll(titleLabel, tableView, button);
        button.prefWidthProperty().bind(vBox.widthProperty());
        return new Tuple4<>(tableView, vBox, button, titleLabel);
    }

    /** Reverses the column order of both offer tables (used when switching
     * between fiat and crypto layouts). */
    private void reverseTableColumns() {
        ObservableList<TableColumn<OfferListItem, ?>> columns = FXCollections.observableArrayList(buyOfferTableView.getColumns());
        buyOfferTableView.getColumns().clear();
        Collections.reverse(columns);
        buyOfferTableView.getColumns().addAll(columns);
        columns = FXCollections.observableArrayList(sellOfferTableView.getColumns());
        sellOfferTableView.getColumns().clear();
        Collections.reverse(columns);
        sellOfferTableView.getColumns().addAll(columns);
    }
}
|
package verification.timed_state_exploration.zoneProject;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import verification.platu.main.Options;
import verification.platu.project.PrjState;
public class TimedStateSet extends HashSet<PrjState> {
/*
* Abstraction Function: The StateSet object follows one of two modes for storing
* elements called the set mode and the subset/superset mode. In the set mode
* elements are simply stored in a HashSet<PrjState> _singletonList. In this
* mode the StateSet simply a wrapper of the HashSet<PrjState> and passes
* the operations to the _singletonList.
*
* In the subset/superset mode, elements are stored in _setList. The idea of is
* to be able to take the un-timed portion of a TimedPrjState (which can be thought
* of as a PrjState) use it as a key and have return all TimedPrjStates in the
* StateSet that share the same un-timed portion. Thus the key value pair
* (k, v) of the HashMap should be such that v is a list containing only
* TimedPrjStates that have the same un-timed portion k.
*/
/*
* Representation Invariant: Only one of _singletonList or _setList should
* be non-null at a time and should correspond to whether the StateSet is in the
* subset/superset mode or not. Furthermore, if the StateSet is operating in the
* subset/superset mode, then it is the _setList that should be non-null.
* The mode is set in the constructor and should not change as long as the object
* is in existence.
*
* In the set mode, operations should be simply passed to the _singletonList
* thereby ensuring that the StateSet acts identical to a HashSet in this mode.
*
* Given a key value pair (k, v) stored in the _setList, the LinkedList v should
* only contain TimedPrjStates that have an un-timed portion equal to k. Specifically,
* s.getUntimedPrjState.equals(k) should return true for each TimedState in v.
*/
private static final long serialVersionUID = 1L;
protected HashSet<PrjState> _singletonList;
protected HashMap<PrjState, LinkedList<TimedPrjState>> _setList;
// Caches whether subsets, supersets, and timing is being used.
private boolean timed, subsets, supersets;
/**
* Creates a state set. The StateSet will be initialized into the into the subset/superset
* mode if the expression
* Options.getImingAnalysisFlag() && (ZoneType.getSubsetFlag() ||
* ZoneType.getSupersetFlag())
* is true. Otherwise the StateSet will be initialized into the set mode. Once the
* StateSet is initialized in the set mode or the subset/superset mode, it cannot be
* changed.
*/
public TimedStateSet(){
// Store that status when StateSet is initialized to avoid unexpected behavior.
timed = Options.getTimingAnalysisFlag();
subsets = Zone.getSubsetFlag();
supersets = Zone.getSupersetFlag();
if(timed && (subsets || supersets)){
_setList = new HashMap<PrjState, LinkedList<TimedPrjState>>();
}
else{
_singletonList = new HashSet<PrjState>();
}
}
// public StateSet(boolean map){
// if(map){
// _setList = new HashMap<PrjState, LinkedList<TimedPrjState>>();
// else{
// _singletonList = new HashSet<PrjState>();
/**
* Determines whether the any PrjStates are in the StateSet.
* @return
* True if any PrjStates are in the StateSet, false otherwise.
*/
public boolean isEmpty() {
    // Delegate to whichever backing structure this set was initialized with.
    return (_singletonList != null)
            ? _singletonList.isEmpty()
            : _setList.isEmpty();
}
/**
* Determines how many PrjStates are in the StateSet.
* @return
* A non-negative integer that gives the number of PrjStates are in the StateSet.
*/
public int size() {
    // Set mode: the backing HashSet already knows its size.
    if (_singletonList != null) {
        return _singletonList.size();
    }
    // Subset/superset mode: the total is the sum of all bucket sizes.
    int total = 0;
    for (LinkedList<TimedPrjState> bucket : _setList.values()) {
        total += bucket.size();
    }
    return total;
}
/**
* Adds a PrjState to the StateSet.
* @param s
* The PrjState to add to the StateSet.
* @return
* True if the StateSet changes by adding the element s.
*/
/**
 * Adds a PrjState to the StateSet.
 *
 * In set mode the element is added directly to the backing HashSet. In
 * subset/superset mode the state must be a TimedPrjState; it is bucketed
 * under its un-timed portion.
 *
 * @param s
 *          The PrjState to add to the StateSet.
 * @return
 *          True if the StateSet changes by adding the element s.
 * @throws IllegalArgumentException
 *          if the set is in subset/superset mode and s is not a TimedPrjState.
 * @throws IllegalStateException
 *          if neither backing structure was initialized.
 */
public boolean add(PrjState s) {
    if (_singletonList != null) {
        return _singletonList.add(s);
    }
    if (_setList != null) {
        if (!(s instanceof TimedPrjState)) {
            // Fix: original message concatenation was missing a space
            // ("un-timedstate"); now reads "un-timed state".
            throw new IllegalArgumentException("Subset/superset mode set, but an un-timed " +
                    "state is being added.");
        }
        TimedPrjState ts = (TimedPrjState) s;
        PrjState untimedState = ts.getUntimedPrjState();
        LinkedList<TimedPrjState> list = _setList.get(untimedState);
        if (list == null) {
            // No list is associated with this set of un-timed (local) states,
            // so create a new bucket containing this state.
            LinkedList<TimedPrjState> newList = new LinkedList<TimedPrjState>();
            newList.add(ts);
            _setList.put(untimedState, newList);
            return true;
        }
        if (list.contains(ts)) {
            // Already present; nothing changes.
            return false;
        }
        list.add(ts);
        return true;
    }
    throw new IllegalStateException("Add was used and StateSet was not initialized.");
}
// public boolean add(PrjState s){
// if(s instanceof TimedPrjState){
// TimedPrjState ts = (TimedPrjState) s;
// return add(ts);
// else if(_singletonList == null){
// " but only an un-timed state is being added.");
// return _singletonList.add(s);
// public boolean add(TimedPrjState s){
// if(_setList == null){
// " but a timed project state is attempted to be added.");
// PrjState untimedState = s.getUntimedPrjState();
// LinkedList<TimedPrjState> list = _setList.get(untimedState);
// if(list == null){
// // No list is associated with this set of un-timed (local) states.
// // So create a new list with this PrjState.
// LinkedList<TimedPrjState> newList = new LinkedList<TimedPrjState>();
// newList.add(s);
// _setList.put(untimedState, newList);
// // The list changed, so return true;
// return true;
// if(list.contains(s)){
// // The set already contains the timed project state. So nothing changes.
// return false;
// else{
// // The set does not already contain the timed project state. So add it.
// list.add(s);
// return true;
/**
 * Overrides the HashSet version of the contains method since TimedStateSets
 * handle containment a little differently: anything that is not a PrjState
 * is trivially absent, otherwise delegate to the PrjState overload.
 */
public boolean contains(Object obj){
    // Short-circuits to false for non-PrjState arguments.
    return (obj instanceof PrjState) && contains((PrjState) obj);
}
/**
 * Determines whether the StateSet contains the PrjState or not.
 *
 * NOTE(review): in subset/superset mode this method is NOT a pure query — when
 * supersets are selected it removes stored states that are strict subsets of s
 * (via Iterator.remove below). Callers should be aware of this side effect.
 * NOTE(review): the IllegalArgumentException message says "is being added",
 * which looks copy-pasted from add(); consider rewording.
 *
 * @param s
 *     The PrjState to determine if the PrjState contains it.
 * @return
 *     True if s is in the PrjState, false otherwise.
 * @throws IllegalArgumentException
 *     If subset/superset mode is active and s is not a TimedPrjState.
 * @throws IllegalStateException
 *     If neither storage structure has been initialized.
 */
public boolean contains(PrjState s){
    if(_singletonList != null){
        return _singletonList.contains(s);
    }
    if(_setList != null){
        // If _setList != null, then StateSet has been initialized into subset/superset mode.
        // Thus there are three possibilities : subsets has been selected, supersets has been
        // selected, or both subsets and supersets have been selected.
        if(!(s instanceof TimedPrjState)){
            throw new IllegalArgumentException("Subset/superset mode set, but an un-timed" +
                    "state is being added.");
        }
        TimedPrjState ts = (TimedPrjState) s;
        // Get the un-timed portion for the cache.
        PrjState untimedState = ts.getUntimedPrjState();
        // Get the list keyed to this set of (local) un-timed states.
        LinkedList<TimedPrjState> list = _setList.get(untimedState);
        if(list == null){
            // No list is associated with this set of un-timed (local) states.
            // So the timed state cannot be in the set.
            return false;
        }
        // Get an iterator from the list to allow removal of elements as the list is
        // traversed.
        Iterator<TimedPrjState> iterate = list.iterator();
        boolean result = false;
        while(iterate.hasNext()){
            TimedPrjState listState = iterate.next();
            // If subsets are selected, then iteration can be exited as soon as a subset is found.
            if(subsets && ts.subset(listState)){
                return true;
            }
            // If supersets are selected, items that are subsets of the new state may be
            // removed.
            if(supersets){
                if(!subsets && ts.equals(listState)){
                    // When an equal state is found, the return value must be true and the
                    // state should not be removed. When not doing subsets, the superset check
                    // cannot end here (since other sets that are supersets may exist further in the
                    // list). If control has passed here, it can be deduced that subsets has not
                    // been selected even without the subset flag since the
                    // if(subsets && s.subset(listState) would have already been taken.
                    // The subset flag is added here to allow a quick out of the extra equality check.
                    result |= true;
                }
                else if(ts.superset(listState)){
                    // The new state (s) is a strict superset of an existing state.
                    // Remove the existing state.
                    iterate.remove();
                }
            }
        }
        return result;
    }
    throw new IllegalStateException("Contains was called and StateSet was not initialized.");
}
// public boolean contains(PrjState s){
// if(s instanceof TimedPrjState){
// TimedPrjState ts = (TimedPrjState) s;
// return contains(ts);
// else if(_singletonList == null){
// " but only an untimed state is being added.");
// return _singletonList.contains(s);
// public boolean contains(TimedPrjState s){
// if(_setList == null){
// " but a timed project state is attempted to be added.");
// // If _setList == null, then StateSet has been initialized into subset/superset mode.
// // Thus there are three possibilities : subsets has been selected, supersets has been
// // selected, or both subsets and supersets have been selected.
// // Get the un-timed portion for the cache.
// PrjState untimedState = s.getUntimedPrjState();
// // Get the list keyed to this set of (local) untimed states.
// LinkedList<TimedPrjState> list = _setList.get(untimedState);
// if(list == null){
// // No list is associated with this set of un-timed (local) states.
// // So the timed state cannot be in the set.
// return false;
// //return list.contains(s);
// // Get an iterator from the list to allow removal of elements as the list is
// // traversed.
// Iterator<TimedPrjState> iterate = list.iterator();
// boolean result = false;
// while(iterate.hasNext()){
// TimedPrjState listState = iterate.next();
// // If subsets are selected, then iteration can be exited as soon as a subset is found.
// if(subsets && s.subset(listState)){
// return true;
// // If supersets are selected, items that are subsets of the new state may be
// // removed.
// if(supersets){
// if(!subsets && s.equals(listState)){
// // When an equal state is found, the return value must be true and the
// // state should not be removed. When not doing subsets, the superset check
// // cannot end here (since other sets that are supersets may exist further in the
// // list). If control has passed here, it can be deduced that subsets has not
// // been selected even without the subset flag since the
// // if(subsets && s.subset(listState) would have already been taken.
// // The subset flag is added here to allow a quick out of the extra equality check.
// result |= true;
// else if(s.superset(listState)){
// // The new state (s) is a strict superset of an existing state.
// // Remove the existing state.
// iterate.remove();
// return result;
/**
 * Converts the StateSet into a HashSet<PrjState>.
 * In singleton mode the backing set itself is returned (no copy); in
 * subset/superset mode all bucket contents are flattened into a new set.
 * @return
 *     A HashSet<PrjState> containing the same PrjStates as the StateSet.
 */
public HashSet<PrjState> toHashSet(){
    if(_singletonList != null){
        return _singletonList;
    }
    HashSet<PrjState> flattened = new HashSet<PrjState>();
    for(LinkedList<TimedPrjState> bucket : _setList.values()){
        flattened.addAll(bucket);
    }
    return flattened;
}
/**
 * Builds a human-readable summary of the StateSet's contents: the state
 * count in singleton mode, or the timed/un-timed counts plus the largest
 * zone size observed in subset/superset mode.
 * @return
 *     The summary string (empty if neither mode is initialized).
 */
public String stateString(){
    StringBuilder summary = new StringBuilder();
    if(_singletonList != null){
        summary.append("# of prjStates found: ").append(size());
    }
    if(_setList != null){
        // Report the total number of project states found.
        summary.append("# of timedPrjStates found: ").append(size());
        // Report the number of un-timed states.
        summary.append(", # of untimed states found: ").append(_setList.size());
        // Report the largest Zone used.
        summary.append(", Largest zone: ").append(Zone.ZoneSize);
    }
    return summary.toString();
}
/*
 * (non-Javadoc)
 * @see java.lang.Iterable#iterator()
 *
 * Produces a StateSetIterator wired for whichever storage mode is active:
 * the HashSet's iterator in singleton mode, or an iterator over the map's
 * bucket lists in subset/superset mode. Exactly one argument is non-null
 * when only one mode is initialized.
 */
public Iterator<PrjState> iterator(){
    Iterator<PrjState> setIter = null;
    Iterator<LinkedList<TimedPrjState>> bucketIter = null;
    if(_singletonList != null){
        setIter = _singletonList.iterator();
    }
    if(_setList != null){
        bucketIter = _setList.values().iterator();
    }
    return new StateSetIterator(setIter, bucketIter);
}
/**
 * This is the particular version of the iterator that the StateSet uses.
 * @author Andrew N. Fisher
 *
 */
private class StateSetIterator implements Iterator<PrjState>{
    /*
     * Abstraction Function:
     * A StateSetIterator is the implementation for the Iterator<PrjState>
     * required by StateSet being iterable. The StateSet has two modes,
     * one where a HashSet<PrjState> is used and one where a
     * HashMap<PrjState, LinkedList<TimedPrjState>> is used. Correspondingly,
     * this iterator has two modes: one that iterates through the HashSet
     * and one that iterates through the HashMap. The member variable
     * _hashIterator is simply the HashSet's own iterator. The member
     * variables _hashListIterator and _listIterator together iterate
     * through all the TimedPrjStates stored in the LinkedLists of the
     * HashMap: _hashListIterator walks the LinkedLists themselves and
     * _listIterator walks a single list. The idea is to get the first
     * LinkedList, traverse its elements, get the second LinkedList,
     * traverse its elements, and so on until every element has been seen.
     */
    /*
     * Representation Invariant : Only one of _hashIterator or _hashListIterator
     * should be non-null at one time. The iterator should be iterating through
     * one mode at a time, either the HashSet mode or the LinkedList mode.
     *
     * If _hashListIterator is not null, then _listIterator should either
     * be the iterator for the last LinkedList returned by _hashListIterator
     * or an iterator that has not exhausted all its elements. The idea is
     * the _listIterator should be able to give the next element that is to
     * be returned if there are still elements that can be returned.
     */
    Iterator<PrjState> _hashIterator;                       // HashSet mode
    Iterator<LinkedList<TimedPrjState>> _hashListIterator;  // HashMap mode: walks buckets
    Iterator<TimedPrjState> _listIterator;                  // HashMap mode: walks current bucket

    /**
     * Initializes the iterator for the StateSet. The member variables that
     * are initialized should match the mode the StateSet is in: hashIterator
     * should be the iterator of the StateSet's _singletonList and listIterator
     * should be the iterator of the _setList values. At most one of the two
     * arguments may be non-null.
     */
    private StateSetIterator(Iterator<PrjState> hashIterator,
            Iterator<LinkedList<TimedPrjState>> listIterator){
        if(hashIterator != null && listIterator != null){
            throw new IllegalStateException("Only one iterator should be non-null.");
        }
        _hashIterator = hashIterator;
        _hashListIterator = listIterator;
        if(_hashListIterator != null && _hashListIterator.hasNext()){
            _listIterator = _hashListIterator.next().iterator();
            // Find the first list with an element or end with the last list.
            while( !_listIterator.hasNext() && _hashListIterator.hasNext()){
                _listIterator = _hashListIterator.next().iterator();
            }
        }
    }

    /*
     * (non-Javadoc)
     * @see java.util.Iterator#hasNext()
     */
    public boolean hasNext() {
        if(_hashIterator != null){
            return _hashIterator.hasNext();
        }
        if(_listIterator == null){
            // HashMap mode with an empty map: nothing to iterate.
            return false;
        }
        return _listIterator.hasNext();
    }

    /*
     * (non-Javadoc)
     * @see java.util.Iterator#next()
     */
    public PrjState next() {
        if(_hashIterator != null){
            return _hashIterator.next();
        }
        PrjState nextState = _listIterator.next();
        // If this list is exhausted, find the next list with elements or
        // get the last list.
        while( !_listIterator.hasNext() && _hashListIterator.hasNext()){
            _listIterator = _hashListIterator.next().iterator();
        }
        return nextState;
    }

    /*
     * (non-Javadoc)
     * @see java.util.Iterator#remove()
     */
    public void remove() {
        if(_hashIterator != null){
            _hashIterator.remove();
            return;
        }
        // TODO: This has the following flaw currently. Suppose list1 and list2 are two
        // nonempty consecutive lists. Once next is called on the last element in list1,
        // list2 becomes the new _listIterator. If remove is called, it will be called on list2
        // instead of list1 causing an error.
        // BUGFIX: the original fell through to the throw below even after a
        // successful _listIterator.remove(), so remove() in HashMap mode
        // always threw UnsupportedOperationException. Also guard against a
        // null _listIterator (empty map) instead of raising an NPE.
        if(_hashListIterator != null && _listIterator != null){
            _listIterator.remove();
            return;
        }
        throw new UnsupportedOperationException("The iterator was not initialized.");
    }
}
}
|
package uk.ac.ebi.atlas.experimentpage;
import uk.ac.ebi.atlas.model.experiment.Experiment;
import uk.ac.ebi.atlas.resource.AtlasResourceHub;
import uk.ac.ebi.atlas.search.SemanticQuery;
import uk.ac.ebi.atlas.utils.HeatmapDataToJsonService;
import uk.ac.ebi.atlas.web.ApplicationProperties;
import uk.ac.ebi.atlas.web.ExperimentPageRequestPreferences;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import javax.servlet.http.HttpServletRequest;
import java.util.HashMap;
import java.util.Map;
/**
 * Base service for assembling the model data that backs an experiment page:
 * header flags, the JSON experiment description consumed by the heatmap
 * widget, and download URLs.
 */
public class ExperimentPageService {
    private final AtlasResourceHub atlasResourceHub;
    protected final HeatmapDataToJsonService heatmapDataToJsonService;
    protected final ApplicationProperties applicationProperties;
    protected final Gson gson = new Gson();

    public ExperimentPageService(AtlasResourceHub atlasResourceHub, HeatmapDataToJsonService heatmapDataToJsonService, ApplicationProperties applicationProperties){
        this.atlasResourceHub = atlasResourceHub;
        this.heatmapDataToJsonService = heatmapDataToJsonService;
        this.applicationProperties = applicationProperties;
    }

    /**
     * Attributes rendered in the page header; currently just whether the
     * experiment has extra info available.
     */
    protected Map<String, ?> headerAttributes(Experiment experiment) {
        Map<String, Object> attributes = new HashMap<>();
        attributes.put("hasExtraInfo", atlasResourceHub.hasExtraInfo(experiment));
        return attributes;
    }

    /**
     * JSON payload attributes for the page: the serialized experiment
     * description keyed as "jsonExperiment".
     */
    protected Map<String, JsonElement> payloadAttributes(Experiment experiment,
            ExperimentPageRequestPreferences requestPreferences){
        Map<String, JsonElement> attributes = new HashMap<>();
        attributes.put("jsonExperiment", prepareExperimentDescription(experiment, requestPreferences));
        return attributes;
    }

    /** Builds the expression-data download URL for the given gene query. */
    protected String downloadURL(SemanticQuery geneQuery, HttpServletRequest request){
        return applicationProperties.buildDownloadURL(geneQuery, request);
    }

    private JsonElement prepareExperimentDescription(Experiment experiment, ExperimentPageRequestPreferences
            requestPreferences){
        SemanticQuery geneQuery = requestPreferences.getGeneQuery();
        String filterFactors = requestPreferences.getSerializedFilterFactors();
        return prepareExperimentDescription(experiment, geneQuery, filterFactors);
    }

    //used when external parties include our widget and also to pass header summary to heatmap tooltips
    private JsonElement prepareExperimentDescription(Experiment experiment, SemanticQuery geneQuery, String
            serializedFilterFactors) {
        String additionalQueryOptionsString =
                "?geneQuery="+geneQuery.toUrlEncodedJson()+
                "&serializedFilterFactors="+serializedFilterFactors;
        String relativePath = "experiments/"+experiment.getAccession()+additionalQueryOptionsString;
        JsonObject experimentDescription = new JsonObject();
        experimentDescription.addProperty("URL", "/"+relativePath);
        experimentDescription.addProperty("relUrl", relativePath);
        experimentDescription.addProperty("description", experiment.getDescription());
        experimentDescription.addProperty("species", experiment.getSpecies().getName());
        return experimentDescription;
    }
}
|
package com.spotify.helios;
import com.google.common.base.Optional;
import com.google.common.base.Throwables;
import com.google.common.net.HostAndPort;
import com.spotify.docker.client.DefaultDockerClient;
import com.spotify.docker.client.DockerClient;
import com.spotify.helios.client.HeliosClient;
import com.spotify.helios.common.descriptors.HostStatus;
import com.spotify.helios.testing.Prober;
import com.spotify.helios.testing.TemporaryJob;
import com.spotify.helios.testing.TemporaryJobBuilder;
import com.spotify.helios.testing.TemporaryJobs;
import org.apache.commons.io.IOUtils;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import java.net.Socket;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.spotify.helios.Utils.soloImage;
import static com.spotify.helios.system.SystemTestBase.ALPINE;
import static com.spotify.helios.system.SystemTestBase.BUSYBOX;
import static com.spotify.helios.system.SystemTestBase.IDLE_COMMAND;
import static com.spotify.helios.system.SystemTestBase.NGINX;
import static java.lang.System.getenv;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.experimental.results.PrintableResult.testResult;
import static org.junit.experimental.results.ResultMatchers.isSuccessful;
public class HeliosSoloIT {
@Rule
public final TemporaryJobs temporaryJobs = TemporaryJobs.create();
private static final String TEST_USER = "HeliosIT";
private static final String TEST_HOST = "solo.local";
private static HeliosClient soloClient;
@Before
public void setup() throws Exception {
// we're going to start helios-solo in a helios job. figure out the docker host, cert path, and
// other stuff that helios-solo needs.
String dockerHost = Optional.fromNullable(getenv("DOCKER_HOST"))
.or("unix:///var/run/docker.sock");
String certPath = getenv("DOCKER_CERT_PATH");
final DockerClient docker = DefaultDockerClient.fromEnv().build();
if (docker.info().kernelVersion().contains("tinycore64")) {
// using boot2docker, so use the unix socket endpoint
dockerHost = "unix:///var/run/docker.sock";
certPath = null;
}
// use a probe container to get the correct value for HELIOS_HOST_ADDRESS
final TemporaryJob probe = temporaryJobs.job()
.image(BUSYBOX)
.command(IDLE_COMMAND)
.deploy();
final String hostAddress = temporaryJobs.client()
.hostStatus(probe.hosts().get(0)).get()
.getEnvironment()
.get("HELIOS_HOST_ADDRESS");
// build the helios-solo job
final TemporaryJobBuilder solo = temporaryJobs.job()
.image(soloImage())
.prober(new SoloStatusProber())
.port("helios", 5801, 55801)
.env("HELIOS_ID", "solo_it")
.env("HELIOS_NAME", TEST_HOST)
.env("DOCKER_HOST", dockerHost)
.env("REGISTRAR_HOST_FORMAT", "_${service}._${protocol}.services.${domain}");
if (!isNullOrEmpty(hostAddress)) {
solo.env("HOST_ADDRESS", hostAddress);
}
if (!isNullOrEmpty(certPath)) {
solo.env("DOCKER_CERT_PATH", "/certs")
.volume("/certs", certPath);
}
if (dockerHost.startsWith("unix:
solo.volume("/var/run/docker.sock", dockerHost.replace("unix:
}
// deploy the helios-solo job and create a Helios client for talking to it
final String masterEndpoint = "http://" + solo.deploy().address("helios").toString();
soloClient = HeliosClient.newBuilder()
.setEndpoints(masterEndpoint)
.setUser(TEST_USER)
.build();
}
@Test
public void soloTest() throws Exception {
// run some jobs on the helios-solo cluster that we just brought up (inception/mind blown)
assertThat(testResult(HeliosSoloITImpl.class), isSuccessful());
assertTrue("jobs are running that should not be",
soloClient.jobs().get(15, SECONDS).isEmpty());
}
public static class HeliosSoloITImpl {
private TemporaryJob nginx;
private TemporaryJob alpine;
@Rule
public final TemporaryJobs soloTemporaryJobs = TemporaryJobs.builder()
.client(soloClient)
.prefixDirectory("/tmp/helios-solo-jobs")
.build();
@Before
public void setup() throws Exception {
// start a container that runs nginx and registers with SkyDNS
nginx = soloTemporaryJobs.job()
.image(NGINX)
.port("http", 80, 59980)
.registration("nginx", "http", "http")
.deploy();
// run a container that does SRV lookup to find the nginx service and then curl's it
alpine = soloTemporaryJobs.job()
.image(ALPINE)
.port("nc", 4711, 54711)
.command("sh", "-c",
"apk-install bind-tools " +
"&& export SRV=$(dig -t SRV +short _nginx._http.services.$SPOTIFY_DOMAIN) " +
"&& export HOST=$(echo $SRV | cut -d' ' -f4) " +
"&& export PORT=$(echo $SRV | cut -d' ' -f3) " +
"&& nc -lk -p 4711 -e curl http://$HOST:$PORT"
)
.deploy();
}
@Test
public void test() throws Exception {
final HostAndPort alpineAddress = alpine.address("nc");
// Connect to alpine container to get the curl response. If we get back the nginx welcome page
// we know that helios properly registered the nginx service in SkyDNS.
try (final Socket s = new Socket(alpineAddress.getHostText(), alpineAddress.getPort())) {
final String result = IOUtils.toString(s.getInputStream()).trim();
assertThat(result, containsString("Welcome to nginx!"));
}
}
}
private class SoloStatusProber implements Prober {
@Override
public boolean probe(String host, int port) {
try {
final HeliosClient soloClient = HeliosClient.newBuilder()
.setEndpoints("http://" + host + ":" + port)
.setUser(TEST_USER)
.build();
final HostStatus hostStatus = soloClient.hostStatus(TEST_HOST).get(30, TimeUnit.SECONDS);
return hostStatus != null && hostStatus.getStatus() == HostStatus.Status.UP;
} catch (InterruptedException | ExecutionException | TimeoutException e) {
throw Throwables.propagate(e);
}
}
}
}
|
package org.inaturalist.android;
import com.actionbarsherlock.app.SherlockFragmentActivity;
import com.actionbarsherlock.view.MenuItem;
import com.crashlytics.android.Crashlytics;
import com.koushikdutta.urlimageviewhelper.UrlImageViewCallback;
import com.koushikdutta.urlimageviewhelper.UrlImageViewHelper;
import android.os.Build;
import io.fabric.sdk.android.Fabric;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.widget.DrawerLayout;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
/**
* Utility class for implementing the side-menu (navigation drawer) used throughout the app
*
*/
public class BaseFragmentActivity extends SherlockFragmentActivity {
static final int CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE = 1;
static final int SELECT_IMAGE_REQUEST_CODE = 2;
private static final String TAG = "BaseFragmentActivity";
private DrawerLayout mDrawerLayout;
private LinearLayout mSideMenu;
private ActionBarDrawerToggle mDrawerToggle;
private INaturalistApp app;
private ActivityHelper mHelper;
private UserDetailsReceiver mUserDetailsReceiver;
public int getStatusBarHeight() {
int result = 0;
int resourceId = getResources().getIdentifier("status_bar_height", "dimen", "android");
if (resourceId > 0) {
result = getResources().getDimensionPixelSize(resourceId);
}
return result;
}
private void moveDrawerToTop() {
LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
DrawerLayout drawer = (DrawerLayout) inflater.inflate(R.layout.side_menu_decor, null); // "null" is important.
// HACK: "steal" the first child of decor view
ViewGroup decor = (ViewGroup) getWindow().getDecorView();
View child = decor.getChildAt(0);
decor.removeView(child);
ViewGroup container = (ViewGroup) drawer.findViewById(R.id.drawer_content); // This is the container we defined just now.
container.addView(child, 0);
drawer.findViewById(R.id.left_drawer).setPadding(0, getStatusBarHeight(), 0, 0);
// Make the drawer replace the first child
decor.addView(drawer);
}
public void onDrawerCreate(Bundle savedInstanceState) {
Fabric.with(this, new Crashlytics());
moveDrawerToTop();
mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout);
mSideMenu = (LinearLayout) findViewById(R.id.left_drawer);
mDrawerToggle = new ActionBarDrawerToggle(this, mDrawerLayout, R.drawable.ic_menu_black_24dp, 0, 0) {
public void onDrawerClosed(View view) {
super.onDrawerClosed(view);
}
public void onDrawerOpened(View drawerView) {
super.onDrawerOpened(drawerView);
}
};
mDrawerLayout.setDrawerListener(mDrawerToggle);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setHomeButtonEnabled(true);
getSupportActionBar().setIcon(android.R.color.transparent);
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
((ImageView)findViewById(R.id.menu_explore_icon)).setAlpha(0.54f);
((ImageView)findViewById(R.id.menu_projects_icon)).setAlpha(0.54f);
((ImageView)findViewById(R.id.menu_guides_icon)).setAlpha(0.54f);
((ImageView)findViewById(R.id.menu_activity_icon)).setAlpha(0.54f);
((ImageView)findViewById(R.id.menu_settings_icon)).setAlpha(0.54f);
}
buildSideMenu();
if (app == null) { app = (INaturalistApp) getApplicationContext(); }
if (mHelper == null) { mHelper = new ActivityHelper(this);}
// See if we need to display the tutorial (only for the first time using the app)
SharedPreferences preferences = getSharedPreferences("iNaturalistPreferences", MODE_PRIVATE);
boolean firstTime = preferences.getBoolean("first_time", true);
if (firstTime) {
Intent intent = new Intent(this, TutorialActivity.class).setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
intent.putExtra("first_time", true);
startActivity(intent);
} else {
app.detectUserCountryAndUpdateNetwork(this);
}
refreshUserDetails();
((Button)findViewById(R.id.menu_login)).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// User not logged-in - redirect to onboarding screen
startActivity(new Intent(BaseFragmentActivity.this, OnboardingActivity.class).setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP));
}
});
}
public void refreshUserDetails() {
SharedPreferences prefs = getSharedPreferences("iNaturalistPreferences", MODE_PRIVATE);
String username = prefs.getString("username", null);
Integer obsCount = prefs.getInt("observation_count", -1);
String userIconUrl = prefs.getString("user_icon_url", null);
if (username != null) {
((TextView)findViewById(R.id.username)).setText(username);
findViewById(R.id.menu_login).setVisibility(View.INVISIBLE);
findViewById(R.id.username).setVisibility(View.VISIBLE);
if (obsCount == -1) {
// Get user details from the server
Intent serviceIntent = new Intent(INaturalistService.ACTION_GET_USER_DETAILS, null, this, INaturalistService.class);
startService(serviceIntent);
}
} else {
findViewById(R.id.menu_login).setVisibility(View.VISIBLE);
findViewById(R.id.username).setVisibility(View.INVISIBLE);
}
if (obsCount > -1) {
if (obsCount == 1) {
((TextView) findViewById(R.id.observation_count)).setText(String.format(getString(R.string.observation_count_single), obsCount));
} else {
((TextView) findViewById(R.id.observation_count)).setText(String.format(getString(R.string.observation_count), obsCount));
}
} else {
String conditions = "(_synced_at IS NULL";
if (username != null) {
conditions += " OR user_login = '" + username + "'";
}
conditions += ") AND (is_deleted = 0 OR is_deleted is NULL)"; // Don't show deleted observations
Cursor cursor = getContentResolver().query(Observation.CONTENT_URI, Observation.PROJECTION, conditions, null, Observation.DEFAULT_SORT_ORDER);
int count = cursor.getCount();
if (count == 1) {
((TextView) findViewById(R.id.observation_count)).setText(String.format(getString(R.string.observation_count_single), count));
} else {
((TextView) findViewById(R.id.observation_count)).setText(String.format(getString(R.string.observation_count), count));
}
cursor.close();
}
if (userIconUrl != null) {
UrlImageViewHelper.setUrlDrawable((ImageView)findViewById(R.id.user_pic), userIconUrl, new UrlImageViewCallback() {
@Override
public void onLoaded(ImageView imageView, Bitmap loadedBitmap, String url, boolean loadedFromCache) {
((ImageView)findViewById(R.id.no_user_pic)).setVisibility(View.GONE);
((ImageView)findViewById(R.id.user_pic)).setVisibility(View.VISIBLE);
}
@Override
public Bitmap onPreSetBitmap(ImageView imageView, Bitmap loadedBitmap, String url, boolean loadedFromCache) {
// Return a circular version of the profile picture
return ImageUtils.getCircleBitmap(loadedBitmap);
}
});
} else {
((ImageView)findViewById(R.id.no_user_pic)).setVisibility(View.VISIBLE);
((ImageView)findViewById(R.id.user_pic)).setVisibility(View.GONE);
}
}
private void buildSideMenu() {
// Only show guides only for Android 4+
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
findViewById(R.id.menu_guides).setVisibility(View.GONE);
}
findViewById(R.id.menu_explore).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startActivityIfNew(new Intent(BaseFragmentActivity.this, INaturalistMapActivity.class).setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP));
}
});
findViewById(R.id.menu_projects).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startActivityIfNew(new Intent(BaseFragmentActivity.this, ProjectsActivity.class).setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP));
}
});
findViewById(R.id.menu_guides).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startActivityIfNew(new Intent(BaseFragmentActivity.this, GuidesActivity.class).setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP));
}
});
findViewById(R.id.menu_activity).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (!isNetworkAvailable()) {
Toast.makeText(getApplicationContext(), R.string.not_connected, Toast.LENGTH_LONG).show();
return;
}
startActivityIfNew(new Intent(BaseFragmentActivity.this, WebActivity.class).setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP));
}
});
findViewById(R.id.menu_settings).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startActivityIfNew(new Intent(BaseFragmentActivity.this, INaturalistPrefsActivity.class).setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP));
}
});
findViewById(R.id.menu_header).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
startActivityIfNew(new Intent(BaseFragmentActivity.this, ObservationListActivity.class).setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP));
}
});
if (INaturalistMapActivity.class.getName().equals(this.getClass().getName())) {
findViewById(R.id.menu_explore).setBackgroundColor(getResources().getColor(R.color.side_menu_item_bg_current));
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
((ImageView) findViewById(R.id.menu_explore_icon)).setAlpha(1.0f);
}
}
if (ProjectsActivity.class.getName().equals(this.getClass().getName())) {
findViewById(R.id.menu_projects).setBackgroundColor(getResources().getColor(R.color.side_menu_item_bg_current));
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
((ImageView) findViewById(R.id.menu_projects_icon)).setAlpha(1.0f);
}
}
if (GuidesActivity.class.getName().equals(this.getClass().getName())) {
findViewById(R.id.menu_guides).setBackgroundColor(getResources().getColor(R.color.side_menu_item_bg_current));
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
((ImageView) findViewById(R.id.menu_guides_icon)).setAlpha(1.0f);
}
}
if (WebActivity.class.getName().equals(this.getClass().getName())) {
findViewById(R.id.menu_activity).setBackgroundColor(getResources().getColor(R.color.side_menu_item_bg_current));
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
((ImageView) findViewById(R.id.menu_activity_icon)).setAlpha(1.0f);
}
}
if (INaturalistPrefsActivity.class.getName().equals(this.getClass().getName())) {
findViewById(R.id.menu_settings).setBackgroundColor(getResources().getColor(R.color.side_menu_item_bg_current));
if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
((ImageView) findViewById(R.id.menu_settings_icon)).setAlpha(1.0f);
}
}
}
private void startActivityIfNew(Intent intent) {
if (intent.getComponent().getClassName().equals(this.getClass().getName())) {
// Activity is already loaded
mDrawerLayout.closeDrawer(mSideMenu);
return;
}
startActivity(intent);
overridePendingTransition(R.anim.show, R.anim.hide);
finish();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == android.R.id.home) {
if (mDrawerLayout.isDrawerOpen(mSideMenu)) {
mDrawerLayout.closeDrawer(mSideMenu);
} else {
mDrawerLayout.openDrawer(mSideMenu);
}
return true;
}
return super.onOptionsItemSelected(item);
}
/**
 * Finishes drawer setup once state restoration is complete and starts
 * listening for user-details broadcasts from the background service.
 */
@Override
protected void onPostCreate(Bundle savedInstanceState) {
    super.onPostCreate(savedInstanceState);
    // Sync the drawer indicator with the (possibly restored) drawer state.
    mDrawerToggle.syncState();
    mUserDetailsReceiver = new UserDetailsReceiver();
    Log.i(TAG, "Registering ACTION_GET_USER_DETAILS_RESULT");
    registerReceiver(mUserDetailsReceiver,
            new IntentFilter(INaturalistService.ACTION_GET_USER_DETAILS_RESULT));
}
// Forward configuration changes (e.g. rotation) to the drawer toggle so its
// indicator drawable stays in sync with the new configuration.
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
mDrawerToggle.onConfigurationChanged(newConfig);
}
/**
 * @return true when the device currently has an active, connected data
 *         network; false when there is no active network or it is offline.
 */
private boolean isNetworkAvailable() {
    ConnectivityManager manager =
            (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
    NetworkInfo info = manager.getActiveNetworkInfo();
    if (info == null) {
        return false;
    }
    return info.isConnected();
}
/**
 * Refreshes the cached user details whenever the activity returns to the
 * foreground, lazily grabbing the application instance if needed.
 */
@Override
protected void onResume() {
    super.onResume();
    if (app == null) {
        app = (INaturalistApp) getApplicationContext();
    }
    refreshUserDetails();
}
/**
 * Stops any in-flight image loading while the activity is backgrounded.
 */
@Override
protected void onPause() {
    super.onPause();
    if (mHelper != null) {
        mHelper.stopLoading();
    }
}
/**
 * Receives the current user's details from the background service and caches
 * the observation count and avatar URL in shared preferences, then refreshes
 * the side-menu UI.
 */
private class UserDetailsReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        Log.i(TAG, "Got GET_USER_DETAILS_RESULT");
        BetterJSONObject user = (BetterJSONObject) intent.getSerializableExtra(INaturalistService.USER);
        if (user == null) {
            // Nothing to cache - service returned no user payload.
            return;
        }
        // Prefer the medium-sized avatar when the server provides one.
        String iconUrl;
        if (user.has("medium_user_icon_url")) {
            iconUrl = user.getString("medium_user_icon_url");
        } else {
            iconUrl = user.getString("user_icon_url");
        }
        SharedPreferences.Editor editor =
                getSharedPreferences("iNaturalistPreferences", MODE_PRIVATE).edit();
        editor.putInt("observation_count", user.getInt("observations_count"));
        editor.putString("user_icon_url", iconUrl);
        editor.apply();
        refreshUserDetails();
    }
}
}
|
package icepick.processor;
import com.google.common.base.Joiner;
import com.google.testing.compile.JavaFileObjects;
import javax.tools.JavaFileObject;
import org.junit.Test;
import static com.google.testing.compile.JavaSourceSubjectFactory.javaSource;
import static icepick.processor.ProcessorTestUtilities.icepickProcessors;
import static org.truth0.Truth.ASSERT;
/**
 * Compile-time tests for the Icepick annotation processor.
 *
 * <p>Each test feeds an in-memory source file through the processor via
 * google/compile-testing and asserts either a compilation failure (for
 * misused {@code @Icicle} annotations) or that the expected
 * {@code $$Icicle} helper class is generated.
 */
public class IcepickProcessorTest {
  /** An {@code @Icicle} field must not be private (the helper assigns it directly). */
  @Test public void fieldsMustNotBePrivate() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n') .join(
        "package test;",
        "import icepick.Icicle;",
        "public class Test {",
        " @Icicle private Object thing;",
        "}"));
    ASSERT.about(javaSource()).that(source)
        .processedWith(icepickProcessors())
        .failsToCompile();
  }
  /** An {@code @Icicle} field must not be static (state is per-instance). */
  @Test public void fieldsMustNotBeStatic() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n') .join(
        "package test;",
        "import icepick.Icicle;",
        "public class Test {",
        " @Icicle static Object thing;",
        "}"));
    ASSERT.about(javaSource()).that(source)
        .processedWith(icepickProcessors())
        .failsToCompile();
  }
  /** An {@code @Icicle} field must not be final (it is reassigned on restore). */
  @Test public void fieldsMustNotBeFinal() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n') .join(
        "package test;",
        "import icepick.Icicle;",
        "public class Test {",
        " @Icicle final Object thing;",
        "}"));
    ASSERT.about(javaSource()).that(source)
        .processedWith(icepickProcessors())
        .failsToCompile();
  }
  /** A class containing {@code @Icicle} fields must not be private. */
  @Test public void classesMustNotBePrivate() {
    // Note: the field is deliberately non-final here. The previous fixture
    // used "@Icicle final Object thing;", whose uninitialized final field
    // fails compilation on its own, so the test passed without ever
    // exercising the private-class check.
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n') .join(
        "package test;",
        "import icepick.Icicle;",
        "public class Test {",
        " private class Inner {",
        " @Icicle Object thing;",
        " }",
        "}"));
    ASSERT.about(javaSource()).that(source)
        .processedWith(icepickProcessors())
        .failsToCompile();
  }
  /** A single annotated field generates a save/restore helper class. */
  @Test public void simple() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n') .join(
        "package test;",
        "import icepick.Icicle;",
        "public class Test {",
        " @Icicle Object thing;",
        "}"));
    JavaFileObject expectedSource =
        JavaFileObjects.forSourceString("test.Test$$Icicle", Joiner.on("\n") .join(
            "package test;",
            "import static icepick.Icepick.wrap;",
            "import static icepick.Icepick.unwrap;",
            "import android.os.Bundle;",
            "import android.os.Parcelable;",
            "public class Test$$Icicle {",
            " private static final String BASE_KEY = \"test.Test$$Icicle.\";",
            " public static void restoreInstanceState(Test target, Bundle savedInstanceState) {",
            " if (savedInstanceState == null) {",
            " return;",
            " }",
            " target.thing = unwrap(savedInstanceState.getParcelable(BASE_KEY + \"thing\"));",
            " }",
            " public static void saveInstanceState(Test target, Bundle outState) {",
            " outState.putParcelable(BASE_KEY + \"thing\", wrap(target.thing));",
            " }",
            "}"
        ));
    ASSERT.about(javaSource()).that(source)
        .processedWith(icepickProcessors())
        .compilesWithoutError()
        .and().generatesSources(expectedSource);
  }
  /** A subclass helper chains to its parent's helper; a subclass without fields gets none. */
  @Test public void withParent() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n') .join(
        "package test;",
        "import icepick.Icicle;",
        "public class Test {",
        " @Icicle Object thing;",
        "}",
        "class TestOne extends Test {",
        " @Icicle Object anotherThing;",
        "}",
        "class TestTwo extends Test {",
        "}"
    ));
    JavaFileObject expectedSource1 = JavaFileObjects.forSourceString("test.Test$$Icicle",
        Joiner.on('\n').join(
            "package test;",
            "import static icepick.Icepick.wrap;",
            "import static icepick.Icepick.unwrap;",
            "import android.os.Bundle;",
            "import android.os.Parcelable;",
            "public class Test$$Icicle {",
            " private static final String BASE_KEY = \"test.Test$$Icicle.\";",
            " public static void restoreInstanceState(Test target, Bundle savedInstanceState) {",
            " if (savedInstanceState == null) {",
            " return;",
            " }",
            " target.thing = unwrap(savedInstanceState.getParcelable(BASE_KEY + \"thing\"));",
            " }",
            " public static void saveInstanceState(Test target, Bundle outState) {",
            " outState.putParcelable(BASE_KEY + \"thing\", wrap(target.thing));",
            " }",
            "}"
        ));
    JavaFileObject expectedSource2 = JavaFileObjects.forSourceString("test.TestOne$$Icicle",
        Joiner.on('\n') .join(
            "package test;",
            "import static icepick.Icepick.wrap;",
            "import static icepick.Icepick.unwrap;",
            "import android.os.Bundle;",
            "import android.os.Parcelable;",
            "public class TestOne$$Icicle {",
            " private static final String BASE_KEY = \"test.TestOne$$Icicle.\";",
            " public static void restoreInstanceState(TestOne target, Bundle savedInstanceState) {",
            " if (savedInstanceState == null) {",
            " return;",
            " }",
            " target.anotherThing = unwrap(savedInstanceState.getParcelable(BASE_KEY + \"anotherThing\"));",
            " test.Test$$Icicle.restoreInstanceState(target, savedInstanceState);",
            " }",
            " public static void saveInstanceState(TestOne target, Bundle outState) {",
            " test.Test$$Icicle.saveInstanceState(target, outState);",
            " outState.putParcelable(BASE_KEY + \"anotherThing\", wrap(target.anotherThing));",
            " }",
            "}"));
    ASSERT.about(javaSource()).that(source)
        .processedWith(icepickProcessors())
        .compilesWithoutError()
        .and().generatesSources(expectedSource1, expectedSource2);
  }
  /** Same as {@link #withParent()} but with a generic parent: type args are erased in helpers. */
  @Test public void withTypedParent() {
    JavaFileObject source = JavaFileObjects.forSourceString("test.Test", Joiner.on('\n') .join(
        "package test;",
        "import icepick.Icicle;",
        "public class Test<T> {",
        " @Icicle Object thing;",
        "}",
        "class TestOne extends Test<String> {",
        " @Icicle Object anotherThing;",
        "}",
        "class TestTwo extends Test<Integer> {",
        "}"
    ));
    JavaFileObject expectedSource1 = JavaFileObjects.forSourceString("test.Test$$Icicle",
        Joiner.on('\n').join(
            "package test;",
            "import static icepick.Icepick.wrap;",
            "import static icepick.Icepick.unwrap;",
            "import android.os.Bundle;",
            "import android.os.Parcelable;",
            "public class Test$$Icicle {",
            " private static final String BASE_KEY = \"test.Test$$Icicle.\";",
            " public static void restoreInstanceState(Test target, Bundle savedInstanceState) {",
            " if (savedInstanceState == null) {",
            " return;",
            " }",
            " target.thing = unwrap(savedInstanceState.getParcelable(BASE_KEY + \"thing\"));",
            " }",
            " public static void saveInstanceState(Test target, Bundle outState) {",
            " outState.putParcelable(BASE_KEY + \"thing\", wrap(target.thing));",
            " }",
            "}"
        ));
    JavaFileObject expectedSource2 = JavaFileObjects.forSourceString("test.TestOne$$Icicle",
        Joiner.on('\n') .join(
            "package test;",
            "import static icepick.Icepick.wrap;",
            "import static icepick.Icepick.unwrap;",
            "import android.os.Bundle;",
            "import android.os.Parcelable;",
            "public class TestOne$$Icicle {",
            " private static final String BASE_KEY = \"test.TestOne$$Icicle.\";",
            " public static void restoreInstanceState(TestOne target, Bundle savedInstanceState) {",
            " if (savedInstanceState == null) {",
            " return;",
            " }",
            " target.anotherThing = unwrap(savedInstanceState.getParcelable(BASE_KEY + \"anotherThing\"));",
            " test.Test$$Icicle.restoreInstanceState(target, savedInstanceState);",
            " }",
            " public static void saveInstanceState(TestOne target, Bundle outState) {",
            " test.Test$$Icicle.saveInstanceState(target, outState);",
            " outState.putParcelable(BASE_KEY + \"anotherThing\", wrap(target.anotherThing));",
            " }",
            "}"));
    ASSERT.about(javaSource()).that(source)
        .processedWith(icepickProcessors())
        .compilesWithoutError()
        .and().generatesSources(expectedSource1, expectedSource2);
  }
}
|
package org.jboss.seam.faces.config;
import java.util.Arrays;
import java.util.Map;
import java.util.Set;
import javax.servlet.ServletContainerInitializer;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.ServletRegistration;
import org.jboss.logging.Logger;
public class FacesServletInitializer implements ServletContainerInitializer
{
private static final Logger log = Logger.getLogger(FacesServletInitializer.class.getName());
private static final String FACES_SERVLET_CLASS_NAME = "javax.faces.webapp.FacesServlet";
private static final String FACES_SERVLET_NAME = "FacesServlet";
|
package org.sagebionetworks;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.UUID;
import org.json.JSONObject;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.sagebionetworks.client.SynapseAdminClient;
import org.sagebionetworks.client.SynapseAdminClientImpl;
import org.sagebionetworks.client.SynapseClient;
import org.sagebionetworks.client.SynapseClientImpl;
import org.sagebionetworks.client.exceptions.SynapseClientException;
import org.sagebionetworks.client.exceptions.SynapseException;
import org.sagebionetworks.client.exceptions.SynapseNotFoundException;
import org.sagebionetworks.repo.manager.S3TestUtils;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.file.ExternalFileHandle;
import org.sagebionetworks.repo.model.file.ExternalUploadDestination;
import org.sagebionetworks.repo.model.file.FileHandle;
import org.sagebionetworks.repo.model.file.FileHandleResults;
import org.sagebionetworks.repo.model.file.PreviewFileHandle;
import org.sagebionetworks.repo.model.file.S3FileHandle;
import org.sagebionetworks.repo.model.file.S3UploadDestination;
import org.sagebionetworks.repo.model.file.UploadDestination;
import org.sagebionetworks.repo.model.file.UploadDestinationLocation;
import org.sagebionetworks.repo.model.file.UploadType;
import org.sagebionetworks.repo.model.project.ExternalS3StorageLocationSetting;
import org.sagebionetworks.repo.model.project.ExternalStorageLocationSetting;
import org.sagebionetworks.repo.model.project.ExternalSyncSetting;
import org.sagebionetworks.repo.model.project.ProjectSetting;
import org.sagebionetworks.repo.model.project.ProjectSettingsType;
import org.sagebionetworks.repo.model.project.S3StorageLocationSetting;
import org.sagebionetworks.repo.model.project.StorageLocationSetting;
import org.sagebionetworks.repo.model.project.UploadDestinationListSetting;
import org.sagebionetworks.schema.adapter.JSONObjectAdapterException;
import org.sagebionetworks.util.TimedAssert;
import org.sagebionetworks.utils.MD5ChecksumHelper;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.util.BinaryUtils;
import com.google.common.collect.Lists;
/**
 * Integration tests for the Synapse file-handle API against a live stack:
 * file upload with preview generation, external/SFTP/external-S3 file
 * handles, storage-location and project settings CRUD, auto-sync folders,
 * and upload-destination resolution.
 */
public class IT049FileHandleTest {
    private static SynapseAdminClient adminSynapse;
    private static SynapseClient synapse;
    // User created in beforeClass(); removed again in afterClass().
    private static Long userToDelete;
    private static AmazonS3Client s3Client;
    // System property pointing at a large local file; when unset, the
    // large-file upload test is skipped (see testLargeFileUplaod()).
    private static final String LARGE_FILE_PATH_PROP_KEY = "org.sagebionetworks.test.large.file.path";
    private static final long MAX_WAIT_MS = 1000*10; // 10 sec
    private static final String FILE_NAME = "LittleImage.png";
    // File handles queued for best-effort deletion in after().
    private List<FileHandle> toDelete = null;
    private File imageFile;
    private Project project;
    @BeforeClass
    public static void beforeClass() throws Exception {
        // Create a user
        adminSynapse = new SynapseAdminClientImpl();
        SynapseClientHelper.setEndpoints(adminSynapse);
        adminSynapse.setUserName(StackConfiguration.getMigrationAdminUsername());
        adminSynapse.setApiKey(StackConfiguration.getMigrationAdminAPIKey());
        adminSynapse.clearAllLocks();
        synapse = new SynapseClientImpl();
        userToDelete = SynapseClientHelper.createUser(adminSynapse, synapse);
        // Bucket used by the external-S3 tests; createBucket is idempotent.
        s3Client = new AmazonS3Client(new BasicAWSCredentials(StackConfiguration.getIAMUserId(), StackConfiguration.getIAMUserKey()));
        s3Client.createBucket(StackConfiguration.singleton().getExternalS3TestBucketName());
    }
    @Before
    public void before() throws SynapseException {
        toDelete = new ArrayList<FileHandle>();
        // Get the image file from the classpath.
        URL url = IT049FileHandleTest.class.getClassLoader().getResource("images/"+FILE_NAME);
        // Un-escape spaces in the classpath URL so File can resolve it.
        imageFile = new File(url.getFile().replaceAll("%20", " "));
        project = new Project();
        project = synapse.createEntity(project);
    }
    @After
    public void after() throws Exception {
        // Best-effort cleanup: ignore handles that are already gone or
        // transient client failures so one failure doesn't mask others.
        for (FileHandle handle: toDelete) {
            try {
                synapse.deleteFileHandle(handle.getId());
            } catch (SynapseNotFoundException e) {
            } catch (SynapseClientException e) { }
        }
        synapse.deleteEntity(project, true);
        S3TestUtils.doDeleteAfter(s3Client);
    }
    @AfterClass
    public static void afterClass() throws Exception {
        try {
            adminSynapse.deleteUser(userToDelete);
        } catch (SynapseException e) { }
    }
    /**
     * Uploads an image, waits for its preview, clears and waits for the
     * preview to be recreated, then verifies handle + preview deletion.
     */
    @Test
    public void testImageFileRoundTrip() throws SynapseException, IOException, InterruptedException{
        assertNotNull(imageFile);
        assertTrue(imageFile.exists());
        String expectedMD5 = MD5ChecksumHelper.getMD5Checksum(imageFile);
        // Create the image
        List<File> list = new LinkedList<File>();
        list.add(imageFile);
        FileHandleResults results = synapse.createFileHandles(list, project.getId());
        assertNotNull(results);
        // We should have one image on the list
        assertNotNull(results.getList());
        assertEquals(1, results.getList().size());
        S3FileHandle handle = (S3FileHandle) results.getList().get(0);
        toDelete.add(handle);
        System.out.println(handle);
        assertEquals("image/png", handle.getContentType());
        assertEquals(FILE_NAME, handle.getFileName());
        assertEquals(new Long(imageFile.length()), handle.getContentSize());
        assertEquals(expectedMD5, handle.getContentMd5());
        // Now wait for the preview to be created.
        long start = System.currentTimeMillis();
        while(handle.getPreviewId() == null){
            System.out.println("Waiting for a preview to be created...");
            Thread.sleep(1000);
            assertTrue("Timed out waiting for a preview image to be created.", (System.currentTimeMillis()-start) < MAX_WAIT_MS);
            handle = (S3FileHandle) synapse.getRawFileHandle(handle.getId());
        }
        // Get the preview file handle.
        PreviewFileHandle preview = (PreviewFileHandle) synapse.getRawFileHandle(handle.getPreviewId());
        assertNotNull(preview);
        System.out.println(preview);
        toDelete.add(preview);
        //clear the preview and wait for it to be recreated
        synapse.clearPreview(handle.getId());
        handle = (S3FileHandle) synapse.getRawFileHandle(handle.getId());
        // NOTE(review): this second wait reuses 'start' from the first wait,
        // so both waits share one MAX_WAIT_MS budget - consider resetting
        // 'start' here; confirm whether the shared budget is intentional.
        while(handle.getPreviewId() == null){
            System.out.println("Waiting for a preview to be recreated...");
            Thread.sleep(1000);
            assertTrue("Timed out waiting for a preview image to be created.", (System.currentTimeMillis()-start) < MAX_WAIT_MS);
            handle = (S3FileHandle) synapse.getRawFileHandle(handle.getId());
        }
        preview = (PreviewFileHandle) synapse.getRawFileHandle(handle.getPreviewId());
        assertNotNull(preview);
        toDelete.add(preview);
        // Now delete the root file handle.
        synapse.deleteFileHandle(handle.getId());
        // The main handle and the preview should get deleted.
        try{
            synapse.getRawFileHandle(handle.getId());
            fail("The handle should be deleted.");
        }catch(SynapseNotFoundException e){
            // expected.
        }
        try{
            synapse.getRawFileHandle(handle.getPreviewId());
            fail("The handle should be deleted.");
        }catch(SynapseNotFoundException e){
            // expected.
        }
    }
    /** Single-file upload with an explicit content type; no preview expected. */
    @Test
    public void testSingleFileRoundTrip() throws SynapseException, IOException, InterruptedException{
        assertNotNull(imageFile);
        assertTrue(imageFile.exists());
        String expectedMD5 = MD5ChecksumHelper.getMD5Checksum(imageFile);
        // Create the image
        String myContentType = "test/content-type";
        FileHandle result = synapse.createFileHandle(imageFile, myContentType, project.getId());
        assertNotNull(result);
        S3FileHandle handle = (S3FileHandle) result;
        toDelete.add(handle);
        System.out.println(handle);
        assertEquals(myContentType, handle.getContentType());
        assertEquals(FILE_NAME, handle.getFileName());
        assertEquals(new Long(imageFile.length()), handle.getContentSize());
        assertEquals(expectedMD5, handle.getContentMd5());
        //preview will not be created for our test content type
        // Now delete the root file handle.
        synapse.deleteFileHandle(handle.getId());
        // The main handle and the preview should get deleted.
        try{
            synapse.getRawFileHandle(handle.getId());
            fail("The handle should be deleted.");
        }catch(SynapseNotFoundException e){
            // expected.
        }
    }
    /** Same round trip through the deprecated createFileHandle overload (no project id). */
    @Test
    public void testSingleFileDeprecatedRoundTrip() throws SynapseException, IOException, InterruptedException {
        assertNotNull(imageFile);
        assertTrue(imageFile.exists());
        String expectedMD5 = MD5ChecksumHelper.getMD5Checksum(imageFile);
        // Create the image
        String myContentType = "test/content-type";
        @SuppressWarnings("deprecation")
        FileHandle result = synapse.createFileHandle(imageFile, myContentType);
        assertNotNull(result);
        S3FileHandle handle = (S3FileHandle) result;
        toDelete.add(handle);
        System.out.println(handle);
        assertEquals(myContentType, handle.getContentType());
        assertEquals(FILE_NAME, handle.getFileName());
        assertEquals(new Long(imageFile.length()), handle.getContentSize());
        assertEquals(expectedMD5, handle.getContentMd5());
        // preview will not be created for our test content type
        // Now delete the root file handle.
        synapse.deleteFileHandle(handle.getId());
        // The main handle and the preview should get deleted.
        try {
            synapse.getRawFileHandle(handle.getId());
            fail("The handle should be deleted.");
        } catch (SynapseNotFoundException e) {
            // expected.
        }
    }
    /** Create/read round trip for an external (URL-backed) file handle. */
    @Test
    public void testExternalRoundTrip() throws JSONObjectAdapterException, SynapseException{
        ExternalFileHandle efh = new ExternalFileHandle();
        efh.setContentType("text/plain");
        efh.setFileName("foo.bar");
        efh.setExternalURL("http://google.com");
        // Save it
        ExternalFileHandle clone = synapse.createExternalFileHandle(efh);
        assertNotNull(clone);
        toDelete.add(clone);
        assertNotNull(clone.getId());
        assertNotNull(clone.getCreatedBy());
        assertNotNull(clone.getCreatedOn());
        assertNotNull(clone.getEtag());
        assertEquals(efh.getFileName(), clone.getFileName());
        assertEquals(efh.getExternalURL(), clone.getExternalURL());
        assertEquals(efh.getContentType(), clone.getContentType());
    }
    /** Null name/content-type should be defaulted to "NOT_SET" by the server. */
    @Test
    public void testExternalRoundTripWithNulls() throws JSONObjectAdapterException, SynapseException{
        // Use a null name and content type
        ExternalFileHandle efh = new ExternalFileHandle();
        efh.setContentType(null);
        efh.setFileName(null);
        efh.setExternalURL("http://google.com");
        // Save it
        ExternalFileHandle clone = synapse.createExternalFileHandle(efh);
        assertNotNull(clone);
        toDelete.add(clone);
        assertNotNull(clone.getId());
        assertNotNull(clone.getCreatedBy());
        assertNotNull(clone.getCreatedOn());
        assertNotNull(clone.getEtag());
        assertEquals("NOT_SET", clone.getFileName());
        assertEquals(efh.getExternalURL(), clone.getExternalURL());
        assertEquals("NOT_SET", clone.getContentType());
    }
    /** CRUD cycle for storage-location settings and an upload project setting. */
    @Test
    public void testProjectSettingsCrud() throws SynapseException, IOException, InterruptedException {
        // create an project setting
        UploadDestinationListSetting projectSetting = new UploadDestinationListSetting();
        projectSetting.setProjectId(project.getId());
        projectSetting.setSettingsType(ProjectSettingsType.upload);
        ExternalStorageLocationSetting externalDestination = new ExternalStorageLocationSetting();
        externalDestination.setUploadType(UploadType.HTTPS);
        externalDestination.setUrl("https://notvalid.com");
        externalDestination.setBanner("warning, at institute");
        externalDestination.setDescription("not in synapse, this is");
        List<StorageLocationSetting> settings = synapse.getMyStorageLocationSettings();
        assertFalse(settings.contains(externalDestination));
        externalDestination = synapse.createStorageLocationSetting(externalDestination);
        settings = synapse.getMyStorageLocationSettings();
        assertTrue(settings.contains(externalDestination));
        StorageLocationSetting settingsClone = synapse.getMyStorageLocationSetting(externalDestination.getStorageLocationId());
        assertEquals(externalDestination, settingsClone);
        projectSetting.setLocations(Lists.newArrayList(externalDestination.getStorageLocationId()));
        ProjectSetting created = synapse.createProjectSetting(projectSetting);
        assertEquals(project.getId(), created.getProjectId());
        assertEquals(ProjectSettingsType.upload, created.getSettingsType());
        assertEquals(UploadDestinationListSetting.class, created.getClass());
        assertEquals(projectSetting.getLocations(), ((UploadDestinationListSetting) created).getLocations());
        ProjectSetting clone = synapse.getProjectSetting(project.getId(), ProjectSettingsType.upload);
        assertEquals(created, clone);
        UploadDestination uploadDestination = synapse.getUploadDestination(project.getId(), externalDestination.getStorageLocationId());
        assertEquals(externalDestination.getUploadType(), uploadDestination.getUploadType());
        assertEquals(externalDestination.getBanner(), uploadDestination.getBanner());
        assertEquals(externalDestination.getStorageLocationId(), uploadDestination.getStorageLocationId());
        synapse.deleteProjectSetting(created.getId());
        assertNull(synapse.getProjectSetting(project.getId(), ProjectSettingsType.upload));
    }
    /**
     * Uploads through an external-S3 storage location (ownership proven via
     * an owner.txt marker object) and creates an external S3 file handle
     * pointing at the uploaded object.
     */
    @Test
    public void testExternalUploadDestinationUploadAndModifyRoundTrip() throws Exception {
        String baseKey = "test-" + UUID.randomUUID();
        // we need to create a authentication object
        String username = synapse.getUserSessionData().getProfile().getUserName();
        S3TestUtils.createObjectFromString(StackConfiguration.singleton().getExternalS3TestBucketName(), baseKey + "owner.txt", username,
                s3Client);
        // create setting
        ExternalS3StorageLocationSetting externalS3Destination = new ExternalS3StorageLocationSetting();
        externalS3Destination.setUploadType(UploadType.S3);
        externalS3Destination.setEndpointUrl(null);
        externalS3Destination.setBucket(StackConfiguration.singleton().getExternalS3TestBucketName());
        externalS3Destination.setBaseKey(baseKey);
        externalS3Destination.setBanner("warning, at institute");
        externalS3Destination.setDescription("not in synapse, this is");
        externalS3Destination = synapse.createStorageLocationSetting(externalS3Destination);
        UploadDestinationListSetting projectSetting = new UploadDestinationListSetting();
        projectSetting.setProjectId(project.getId());
        projectSetting.setSettingsType(ProjectSettingsType.upload);
        projectSetting.setLocations(Lists.newArrayList(externalS3Destination.getStorageLocationId()));
        synapse.createProjectSetting(projectSetting);
        String myContentType = "test/content-type";
        FileHandle result = synapse.createFileHandle(imageFile, myContentType, project.getId());
        toDelete.add(result);
        assertEquals(S3FileHandle.class, result.getClass());
        assertEquals(externalS3Destination.getStorageLocationId(), result.getStorageLocationId());
        File tmpFile = File.createTempFile(imageFile.getName(), ".tmp");
        synapse.downloadFromFileHandleTemporaryUrl(result.getId(), tmpFile);
        FileHandle result2 = synapse.createFileHandle(imageFile, myContentType, false, project.getId(), result.getStorageLocationId());
        toDelete.add(result2);
        assertEquals(S3FileHandle.class, result2.getClass());
        assertEquals(externalS3Destination.getStorageLocationId(), result2.getStorageLocationId());
        assertTrue(result2 instanceof S3FileHandle);
        S3FileHandle result2S3 = (S3FileHandle) result2;
        // Create an external file handle using the external location.
        S3FileHandle externalS3 = new S3FileHandle();
        externalS3.setBucketName(result2S3.getBucketName());
        externalS3.setKey(result2S3.getKey());
        externalS3.setFileName(result2S3.getFileName());
        externalS3.setStorageLocationId(result.getStorageLocationId());
        // create it
        externalS3 = synapse.createExternalS3FileHandle(externalS3);
        assertNotNull(externalS3);
        assertNotNull(externalS3.getId());
    }
    /**
     * Enables auto-sync on an external-S3 location and polls until the
     * pre-existing bucket object appears as an entity in the project.
     */
    @Test
    public void testAutoSyncRoundTrip() throws Exception {
        String baseKey = "test-" + UUID.randomUUID();
        // we need to create a authentication object
        String username = synapse.getUserSessionData().getProfile().getUserName();
        S3TestUtils.createObjectFromString(StackConfiguration.singleton().getExternalS3TestBucketName(), baseKey + "owner.txt", username,
                s3Client);
        final String md5 = S3TestUtils.createObjectFromString(StackConfiguration.singleton().getExternalS3TestBucketName(), baseKey
                + "file1.txt", UUID.randomUUID().toString(), s3Client);
        // create setting
        ExternalS3StorageLocationSetting externalS3Destination = new ExternalS3StorageLocationSetting();
        externalS3Destination.setUploadType(UploadType.S3);
        externalS3Destination.setEndpointUrl(null);
        externalS3Destination.setBucket(StackConfiguration.singleton().getExternalS3TestBucketName());
        externalS3Destination.setBaseKey(baseKey);
        externalS3Destination.setBanner("warning, at institute");
        externalS3Destination.setDescription("not in synapse, this is");
        externalS3Destination = synapse.createStorageLocationSetting(externalS3Destination);
        ExternalSyncSetting externalSyncSetting = new ExternalSyncSetting();
        externalSyncSetting.setLocationId(externalS3Destination.getStorageLocationId());
        externalSyncSetting.setAutoSync(true);
        externalSyncSetting.setProjectId(project.getId());
        externalSyncSetting.setSettingsType(ProjectSettingsType.external_sync);
        synapse.createProjectSetting(externalSyncSetting);
        // Poll for up to 30s (500ms interval) until the sync worker runs.
        TimedAssert.waitForAssert(30000, 500, new Runnable() {
            @Override
            public void run() {
                try {
                    // NOTE(review): the trailing "LIMIT_1_OFFSET_1" looks garbled -
                    // possibly meant to be "LIMIT 1 OFFSET 1"; verify against the
                    // Synapse query-string syntax.
                    JSONObject query = synapse.query("select name from entity where parentId == '" + project.getId() + "' LIMIT_1_OFFSET_1");
                    assertEquals(1L, query.getInt("totalNumberOfResults"));
                    assertEquals("file1.txt", query.getJSONArray("results").getJSONObject(0).getString("entity.name"));
                    String hexMD5 = BinaryUtils.toHex(BinaryUtils.fromBase64(md5));
                    assertEquals(1, synapse.getEntityHeaderByMd5(hexMD5).size());
                } catch (SynapseNotFoundException e) {
                    fail();
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }
    /**
     * Registers two upload destinations (SFTP + internal S3) on one project
     * and verifies both are resolvable in the configured order.
     */
    @Test
    public void testExternalUploadDestinationChoice() throws SynapseException, IOException, InterruptedException {
        // create an project setting
        ExternalStorageLocationSetting externalDestination = new ExternalStorageLocationSetting();
        externalDestination.setUploadType(UploadType.SFTP);
        externalDestination.setUrl("sftp://somewhere.com");
        externalDestination.setBanner("warning, at institute");
        externalDestination.setDescription("not in synapse, this is");
        externalDestination = synapse.createStorageLocationSetting(externalDestination);
        S3StorageLocationSetting internalS3Destination = new S3StorageLocationSetting();
        internalS3Destination.setUploadType(UploadType.S3);
        internalS3Destination.setBanner("warning, not at institute");
        internalS3Destination = synapse.createStorageLocationSetting(internalS3Destination);
        UploadDestinationListSetting projectSetting = new UploadDestinationListSetting();
        projectSetting.setProjectId(project.getId());
        projectSetting.setSettingsType(ProjectSettingsType.upload);
        projectSetting.setLocations(Lists.newArrayList(externalDestination.getStorageLocationId(),
                internalS3Destination.getStorageLocationId()));
        synapse.createProjectSetting(projectSetting);
        UploadDestinationLocation[] uploadDestinationLocations = synapse.getUploadDestinationLocations(project.getId());
        assertEquals(2, uploadDestinationLocations.length);
        assertEquals(externalDestination.getStorageLocationId(), uploadDestinationLocations[0].getStorageLocationId());
        assertEquals(internalS3Destination.getStorageLocationId(), uploadDestinationLocations[1].getStorageLocationId());
        UploadDestination uploadDestination = synapse.getUploadDestination(project.getId(),
                uploadDestinationLocations[0].getStorageLocationId());
        assertEquals(UploadType.SFTP, uploadDestination.getUploadType());
        assertEquals(externalDestination.getStorageLocationId(), uploadDestination.getStorageLocationId());
        assertEquals(ExternalUploadDestination.class, uploadDestination.getClass());
        uploadDestination = synapse.getUploadDestination(project.getId(), uploadDestinationLocations[1].getStorageLocationId());
        assertEquals(UploadType.S3, uploadDestination.getUploadType());
        assertEquals(internalS3Destination.getStorageLocationId(), uploadDestination.getStorageLocationId());
        assertEquals(S3UploadDestination.class, uploadDestination.getClass());
    }
    /** An SFTP URL is accepted as an external file handle. */
    @Test
    public void testCreateSFTPExternalFile() throws Exception {
        ExternalFileHandle efh = new ExternalFileHandle();
        efh.setContentType(null);
        efh.setFileName(null);
        efh.setExternalURL("sftp://somewhere.com");
        ExternalFileHandle clone = synapse.createExternalFileHandle(efh);
        toDelete.add(clone);
    }
    /**
     * This test uploads files that are too large to include in the build.
     * To run this test, set the property to point to a large file: org.sagebionetworks.test.large.file.path=<path to large file>
     * @throws IOException
     * @throws SynapseException
     */
    @Test
    public void testLargeFileUplaod() throws SynapseException, IOException{
        String largeFileName = System.getProperty(LARGE_FILE_PATH_PROP_KEY);
        if(largeFileName != null){
            // Run the test
            File largeFile = new File(largeFileName);
            assertTrue(largeFile.exists());
            System.out.println("Attempting to upload a file of size: "+largeFile.length());
            float fileSize = largeFile.length();
            float bytesPerMB = (float) Math.pow(2, 20);
            float fileSizeMB = fileSize/bytesPerMB;
            System.out.println(String.format("Attempting to upload file: %1$s of size %2$.2f", largeFile.getName(), fileSizeMB));
            String contentType = SynapseClientImpl.guessContentTypeFromStream(largeFile);
            long start = System.currentTimeMillis();
            FileHandle handle = synapse.createFileHandle(largeFile, contentType, project.getId());
            long elapse = System.currentTimeMillis()-start;
            // NOTE(review): elapse/1000 is integer division, so sub-second
            // precision is lost in the reported rate - likely unintended.
            float elapseSecs = elapse/1000;
            float mbPerSec = fileSizeMB/elapseSecs;
            System.out.println(String.format("Upload file: %1$s of size %2$.2f in %3$.2f secs with rate %4$.2f MB/Sec", largeFile.getName(), fileSizeMB, elapseSecs, mbPerSec));
            assertNotNull(handle);
            toDelete.add(handle);
        }else{
            System.out.println("The property: '"+LARGE_FILE_PATH_PROP_KEY+"' was not set. The testLargeFileUplaod() test was not run");
        }
    }
}
|
package org.intermine.web.logic.query;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Collection;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.intermine.api.bag.BagManager;
import org.intermine.api.bag.BagQueryConfig;
import org.intermine.api.config.ClassKeyHelper;
import org.intermine.api.profile.InterMineBag;
import org.intermine.api.profile.Profile;
import org.intermine.api.template.SwitchOffAbility;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.ReferenceDescriptor;
import org.intermine.objectstore.ObjectStoreSummary;
import org.intermine.objectstore.query.BagConstraint;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.pathquery.ConstraintValueParser;
import org.intermine.pathquery.Path;
import org.intermine.pathquery.PathConstraint;
import org.intermine.pathquery.PathConstraintAttribute;
import org.intermine.pathquery.PathConstraintBag;
import org.intermine.pathquery.PathConstraintLookup;
import org.intermine.pathquery.PathConstraintLoop;
import org.intermine.pathquery.PathConstraintMultiValue;
import org.intermine.pathquery.PathConstraintNull;
import org.intermine.pathquery.PathConstraintSubclass;
import org.intermine.pathquery.PathException;
import org.intermine.pathquery.PathQuery;
import org.intermine.util.StringUtil;
import org.intermine.web.autocompletion.AutoCompleter;
import org.intermine.web.logic.querybuilder.DisplayPath;
/**
* Representation of a PathQuery constraint for use by JSP pages. This object provides methods
* needed to populate constraint editing boxes and dropdowns, find available bag names, etc. Can
* either represent a new constraint to be added with no values set or an existing constraint that
* is being edited.
*
* Get methods return null if no values are available
*
* @author Richard Smith
*/
public class DisplayConstraint
{
// The path being constrained.
private Path path;
// Lazily built cache for getValidOps().
private List<DisplayConstraintOption> validOps;
// Autocompleter for attribute values; may be null.
private AutoCompleter ac;
// Summary of ObjectStore contents, used to find possible values.
private ObjectStoreSummary oss;
// Simple name of the class at the end of the path.
private String endCls;
// Name of the last path element, or null for a root path.
private String fieldName;
// Extra configuration needed for LOOKUP constraints.
private BagQueryConfig bagQueryConfig;
// Class key (identifier field) configuration, keyed by class name.
private Map<String, List<FieldDescriptor>> classKeys;
// Access to the user's saved bags and global bags.
private BagManager bagManager;
// The user editing the query.
private Profile profile;
// Label associated with this constraint in a template; may be null.
private String constraintLabel;
// Lazily built cache for getFixedOps().
private List<DisplayConstraintOption> fixedOps;
// The existing constraint being edited; null when adding a new one.
private PathConstraint con;
// The query this constraint belongs to.
private PathQuery query;
// Code of this constraint within the query; null for a new constraint.
private String code;
// True if this is a template query constraint that is editable.
private boolean editableInTemplate;
// Whether the constraint is on, off or locked (template queries).
private SwitchOffAbility switchOffAbility;
// Set via setBagSelected(); forces bag-style reporting even for an
// attribute constraint.
private boolean isBagSelected;
// Bag name returned by getSelectedValue() while isBagSelected is set.
private String selectedBagValue;
// Operator returned by getSelectedOp() while isBagSelected is set.
private ConstraintOp selectedBagOp;
// Restricted values computed by the template summariser; may be null.
private List<Object> templateSummary;
/**
* Construct for a new constraint that is being added to a query.
* @param path The path that is being constrained
* @param profile user editing the query, used to fetch available bags
* @param query the PathQuery, in order to provide information on candidate loops
* @param ac auto completer
* @param oss summary data for the ObjectStore contents
* @param bagQueryConfig addition details for needed for LOOKUP constraints
* @param classKeys identifier field config, needed for LOOKUP constraints
* @param bagManager provides access to saved bags
*/
protected DisplayConstraint(Path path, Profile profile, PathQuery query, AutoCompleter ac,
        ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
        Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager) {
    // No existing PathConstraint is passed, so 'con' stays null and this
    // object represents a brand new constraint being added to the query.
    init(path, profile, query, ac, oss, bagQueryConfig, classKeys, bagManager);
}
/**
* Construct for an existing constraint that is being edited.
* @param path The path that is being constrained
* @param con the constraint being edited
* @param label text associated with this constraint, if a template query
* @param code the code of this constraint in the query
* @param editableInTemplate true if this is a template query and this constraint is editable
* @param switchOffAbility whether the constraint is on, off or locked
* @param profile user editing the query, used to fetch available bags
* @param query the PathQuery, in order to provide information on candidate loops
* @param ac auto completer
* @param oss summary data for the ObjectStore contents
* @param bagQueryConfig addition details for needed for LOOKUP constraints
* @param classKeys identifier field config, needed for LOOKUP constraints
* @param bagManager provides access to saved bags
*/
protected DisplayConstraint(Path path, PathConstraint con, String label, String code,
    boolean editableInTemplate, SwitchOffAbility switchOffAbility, Profile profile,
    PathQuery query, AutoCompleter ac,
    ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
    Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager,
    List<Object> templateSummary) {
    // Shared setup first, then the state that only applies when editing an
    // existing constraint (possibly inside a template query).
    init(path, profile, query, ac, oss, bagQueryConfig, classKeys, bagManager);
    this.con = con;
    this.constraintLabel = label;
    this.code = code;
    this.editableInTemplate = editableInTemplate;
    this.switchOffAbility = switchOffAbility;
    this.templateSummary = templateSummary;
}
// Common initialisation shared by both constructors.
private void init(Path path, Profile profile, PathQuery query, AutoCompleter ac,
    ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
    Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager) {
    this.path = path;
    this.ac = ac;
    this.oss = oss;
    // Pre-compute the end class and field name - they are used repeatedly
    // when building titles, operator lists and possible values.
    this.endCls = getEndClass(path);
    this.fieldName = getFieldName(path);
    this.bagQueryConfig = bagQueryConfig;
    this.classKeys = classKeys;
    this.profile = profile;
    this.query = query;
    this.bagManager = bagManager;
    this.isBagSelected = false;
}
// Simple name of the class the path ends on; a root path has no "last"
// class descriptor, so fall back to the start class.
private String getEndClass(Path path) {
    ClassDescriptor cld = path.isRootPath()
            ? path.getStartClassDescriptor()
            : path.getLastClassDescriptor();
    return cld.getType().getSimpleName();
}
// Last element of the path, or null when the path is just a root class.
private String getFieldName(Path path) {
    return path.isRootPath() ? null : path.getLastElement();
}
// TODO this should be in some common code
/**
 * Return the single string value carried by the given constraint: the
 * attribute value, bag name, LOOKUP value, subclass type or loop path.
 * For a null constraint the operator name itself is returned. Returns
 * null for constraint types with no single value (e.g. multi-value).
 */
private String constraintStringValue(PathConstraint con) {
    if (con instanceof PathConstraintAttribute) {
        return ((PathConstraintAttribute) con).getValue();
    } else if (con instanceof PathConstraintBag) {
        return ((PathConstraintBag) con).getBag();
    } else if (con instanceof PathConstraintLookup) {
        return ((PathConstraintLookup) con).getValue();
    } else if (con instanceof PathConstraintSubclass) {
        return ((PathConstraintSubclass) con).getType();
    } else if (con instanceof PathConstraintLoop) {
        return ((PathConstraintLoop) con).getLoopPath();
    } else if (con instanceof PathConstraintNull) {
        return ((PathConstraintNull) con).getOp().toString();
    }
    return null;
}
/**
* If editing an existing constraint get the code for this constraint in the query, return null
* if creating a new constraint.
* @return the constraint code or null
*/
public String getCode() {
    // Null when this object represents a new, not-yet-added constraint.
    return code;
}
/**
* Return true if editing an existing template constraint and that constraint is editable.
* @return true if an editable template constraint, false otherwise
*/
public boolean isEditableInTemplate() {
    // Set only by the "edit existing constraint" constructor.
    return editableInTemplate;
}
/**
* Get a representation of the path that is being constrained. DisplayPath provides convenience
* methods for use in JSP.
* @return the path being constrained
*/
public DisplayPath getPath() {
    // Wrap the raw Path in DisplayPath for JSP-friendly accessors.
    return new DisplayPath(path);
}
/**
* If editing an existing constraint, return the selected value. Otherwise return null. If
* an attribute constraint this will be the value the user entered. If a bag constraint, the
* selected bag name, etc. If an attribute constraint but a bag has been selected, this will
* be the selectedBagValue that was set.
* @return the selected value or null
*/
public String getSelectedValue() {
    // An explicitly selected bag takes precedence over the constraint value.
    if (isBagSelected) {
        return selectedBagValue;
    }
    return (con == null) ? null : constraintStringValue(con);
}
/**
* Returns the value collection if the constraint is a multivalue, otherwise return null.
*
* @return a Collection of Strings
*/
public Collection<String> getMultiValues() {
    // Only multi-value constraints carry a value collection.
    return isMultiValueSelected()
            ? ((PathConstraintMultiValue) con).getValues()
            : null;
}
/**
* If the constraint is a multivalue, returns the value collection
* represented as string separated by ',', otherwise return an empty String.
*
* @return a String representing the multivalues of constraint
*/
public String getMultiValuesAsString() {
    // Use a StringBuilder instead of repeated String concatenation, which
    // allocated a fresh String on every loop iteration.
    StringBuilder multiValuesAsString = new StringBuilder();
    Collection<String> values = getMultiValues();
    if (values != null) {
        for (String value : values) {
            // The trailing comma matches the original output and is kept
            // deliberately - callers may rely on it.
            multiValuesAsString.append(value).append(',');
        }
    }
    return multiValuesAsString.toString();
}
/**
* Return true if editing an existing constraint and a bag has been selected.
* @return true if a bag has been selected
*/
public boolean isBagSelected() {
    // 'instanceof' already yields false for null, so the former explicit
    // null check on 'con' was redundant.
    return isBagSelected || con instanceof PathConstraintBag;
}
/**
* Set if the bag is selected, used by the method isBagSelected that returns true,
* even if the constraint is an attribute constraint
* @param isBagSelected true if a bag has been selected
*/
public void setBagSelected(boolean isBagSelected) {
    // Forces bag-style reporting even for an attribute constraint.
    this.isBagSelected = isBagSelected;
}
/**
* Return true if editing an existing constraint and 'has a value' or 'has no value' has been
* selected.
* @return true if a null constraint was selected
*/
public boolean isNullSelected() {
    // instanceof is null-safe; no separate null check needed.
    return con instanceof PathConstraintNull;
}
/**
 * Return true if the constrained path is of boolean type, either the
 * primitive or java.lang.Boolean.
 * @return true if the path's end type is boolean
 */
public boolean isBoolean() {
    String type = getPath().getType();
    return ("boolean".equals(type) || "java.lang.Boolean".equals(type));
}
/**
* Return true if editing an existing constraint and an attribute value or LOOKUP constraint
* was selected.
* @return true if an attribute/LOOKUP constraint was selected
*/
public boolean isValueSelected() {
    if (con == null) {
        return false;
    }
    // A "value" constraint is anything that is not a bag, null or loop
    // constraint - i.e. an attribute or LOOKUP constraint.
    return !(isBagSelected() || isNullSelected() || isLoopSelected());
}
/**
* Return true if editing an existing constraint and a loop value has been
* selected.
* @return true if a loop constraint was selected
*/
public boolean isLoopSelected() {
    // instanceof is null-safe; no separate null check needed.
    return con instanceof PathConstraintLoop;
}
/**
* Return true if editing an existing constraint and a multivalue has been
* selected.
* @return true if a multivalue constraint was selected
*/
public boolean isMultiValueSelected() {
    // instanceof is null-safe; no separate null check needed.
    return con instanceof PathConstraintMultiValue;
}
/**
* Return the last class in the path and fieldname as the title for the constraint.
* @return the title of this constraint
*/
public String getTitle() {
    // Root paths have no field name, so show just the class.
    if (fieldName == null) {
        return endCls;
    }
    return endCls + " " + fieldName;
}
/**
* Return the label associated with a constraint if editing a template query constraint.
* @return the constraint label
*/
public String getDescription() {
    // May be null when no label was supplied (non-template queries).
    return constraintLabel;
}
/**
* Return a help message to display alongside the constraint, this will examine the constraint
* type and generate an appropriate message, e.g. list the key fields for LOOKUP constraints
* and explain the use of wildcards. Returns null when there is no appropriate help.
* @return the help message or null
*/
public String getHelpMessage() {
    // Delegates to the shared help-message builder for this constraint.
    return DisplayConstraintHelpMessages.getHelpMessage(this);
}
/**
* If the bag is selected, return the value set with the method setSelectedBagOp.
* If editing an existing constraint return the operation used.
* Otherwise return null.
* @return the selected constraint op or null
*/
public DisplayConstraintOption getSelectedOp() {
    // An explicitly selected bag operator takes precedence.
    if (isBagSelected) {
        return new DisplayConstraintOption(selectedBagOp.toString(),
                selectedBagOp.getIndex());
    }
    if (con == null) {
        return null;
    }
    ConstraintOp selectedOp = con.getOp();
    if (selectedOp == null) {
        return null;
    }
    return new DisplayConstraintOption(selectedOp.toString(), selectedOp.getIndex());
}
/**
* Set the selectedBagOp
* @param selectedBagOp the constraint op returned by the method getSelectedOp()
* if the bag is selected
*/
public void setSelectedBagOp(ConstraintOp selectedBagOp) {
    // Reported by getSelectedOp() while isBagSelected is set.
    this.selectedBagOp = selectedBagOp;
}
/**
* Set the selectedBagValue returned by getSelectedValue if the bag is selected
* @param selectedBagValue string to set the selectedBagValue
*/
public void setSelectedBagValue(String selectedBagValue) {
    // Reported by getSelectedValue() while isBagSelected is set.
    this.selectedBagValue = selectedBagValue;
}
/**
* If editing an existing LOOKUP constraint return the value selected for the extra constraint
* field. Otherwise return null
* @return the LOOKUP constraint extra value or null
*/
public String getSelectedExtraValue() {
    // Only LOOKUP constraints carry an extra value.
    return (con instanceof PathConstraintLookup)
            ? ((PathConstraintLookup) con).getExtraValue()
            : null;
}
/**
* Given the path being constrained return the valid constraint operations. If constraining an
* attribute the valid ops depend on the type being constraint - String, Integer, Boolean, etc.
* @return the valid constraint operations
*/
public List<DisplayConstraintOption> getValidOps() {
    // Built lazily and cached - this method may be called several times
    // while rendering one page.
    if (validOps != null) {
        return validOps;
    }
    validOps = new ArrayList<DisplayConstraintOption>();
    if (con instanceof PathConstraintBag) {
        for (ConstraintOp op : PathConstraintBag.VALID_OPS) {
            validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
        }
    } else if (con instanceof PathConstraintSubclass) {
        // Subclass constraints have no user-selectable operator.
        return validOps;
    } else if (con instanceof PathConstraintLoop) {
        validOps.addAll(getLoopQueryOps());
    } else if (path.endIsAttribute()) {
        List<ConstraintOp> allOps = SimpleConstraint.validOps(path.getEndType());
        // TODO This was in the constraint jsp:
        // <c:if test="${!(editingNode.type == 'String' && (op.value == '<='
        //|| op.value == '>='))}">
        // TODO this should show different options if a dropdown is to be used
        // Fetch possible values once; the previous code called
        // getPossibleValues() twice, and it can run a summary lookup.
        List<Object> possibleValues = getPossibleValues();
        boolean existPossibleValues = possibleValues != null && !possibleValues.isEmpty();
        for (ConstraintOp op : allOps) {
            // NOTE(review): 6 and 7 appear to be the indexes of operators
            // only usable with a dropdown - TODO replace these magic
            // numbers with the named ConstraintOp indexes.
            if (!existPossibleValues && (op.getIndex() == 6 || op.getIndex() == 7)) {
                continue;
            }
            validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
        }
        if (existPossibleValues) {
            // A dropdown exists, so the multi-value operators also apply.
            for (ConstraintOp op : PathConstraintMultiValue.VALID_OPS) {
                validOps.add(new DisplayConstraintOption(op.toString(),
                        op.getIndex()));
            }
        }
    } else if (isLookup()) {
        // Constraining a class (reference/collection) with key fields
        // defined: only LOOKUP applies.
        ConstraintOp lookup = ConstraintOp.LOOKUP;
        validOps.add(new DisplayConstraintOption(lookup.toString(), lookup.getIndex()));
    }
    return validOps;
}
/**
* Returns the set of operators valid for loop constraints.
*
* @return a List of DisplayConstraintOption objects
*/
public List<DisplayConstraintOption> getLoopQueryOps() {
    // Loop constraints can only test equality / inequality of paths.
    DisplayConstraintOption eq = new DisplayConstraintOption(
            ConstraintOp.EQUALS.toString(), ConstraintOp.EQUALS.getIndex());
    DisplayConstraintOption ne = new DisplayConstraintOption(
            ConstraintOp.NOT_EQUALS.toString(), ConstraintOp.NOT_EQUALS.getIndex());
    return Arrays.asList(eq, ne);
}
/**
* Return true if this constraint should be a LOOKUP, true if constraining a class (ref/col)
* instead of an attribute and that class has class keys defined.
* @return true if this constraint should be a LOOKUP
*/
public boolean isLookup() {
    // LOOKUP applies when constraining a class (not an attribute) whose
    // type has class keys configured.
    return !path.endIsAttribute() && ClassKeyHelper.hasKeyFields(classKeys, endCls);
}
/**
* Return the LOOKUP constraint op.
* @return the LOOKUP constraint op
*/
// TODO do we need this? validOps should contain the correct value
public DisplayConstraintOption getLookupOp() {
    return new DisplayConstraintOption(ConstraintOp.LOOKUP.toString(),
            ConstraintOp.LOOKUP.getIndex());
}
/**
* Return the autocompleter for this path if one is available. Otherwise return null.
* @return an autocompleter for this path or null
*/
public AutoCompleter getAutoCompleter() {
    // Only expose the completer when it actually covers this class/field.
    boolean available = (ac != null) && ac.hasAutocompleter(endCls, fieldName);
    return available ? ac : null;
}
/**
* Values to populate a dropdown for the path if possible values are available.
* @return possible values to populate a dropdown
*/
public List<Object> getPossibleValues() {
    // If this is a template it may have been summarised, giving a
    // restricted set of values for particular paths (the TemplateSummariser
    // runs queries to work out exact values constraints could take given
    // the other constraints in the query). Check this first - it takes
    // precedence and needs no class-name lookup.
    if (templateSummary != null && !templateSummary.isEmpty()) {
        return templateSummary;
    }
    String className = path.isRootPath()
            ? path.getStartClassDescriptor().getType().getCanonicalName()
            : path.getLastClassDescriptor().getType().getCanonicalName();
    // Otherwise we may have possible values from the ObjectStoreSummary.
    List<Object> fieldValues = oss.getFieldValues(className, fieldName);
    // Guard against a null summary result, which previously caused an NPE
    // in the Date formatting loop below.
    if (fieldValues != null && path.endIsAttribute()
            && Date.class.equals(path.getEndType())) {
        // Date values arrive as raw strings; re-format each for display.
        List<Object> fieldValueFormatted = new ArrayList<Object>();
        for (Object obj : fieldValues) {
            fieldValueFormatted.add(ConstraintValueParser.format((String) obj));
        }
        return fieldValueFormatted;
    }
    return fieldValues;
}
/**
* If a dropdown is available for a constraint fewer operations are possible, return the list
* of operations.
* @return the constraint ops available when selecting values from a dropdown
*/
// TODO Do we need this, could getValildOps return the correct ops if a dropdown is available
public List<DisplayConstraintOption> getFixedOps() {
    // Cached after the first successful build.
    if (fixedOps != null) {
        return fixedOps;
    }
    if (getPossibleValues() == null) {
        // No dropdown available - leave the cache unset and return null,
        // exactly as before.
        return null;
    }
    fixedOps = new ArrayList<DisplayConstraintOption>();
    for (ConstraintOp op : SimpleConstraint.fixedEnumOps(path.getEndType())) {
        fixedOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
    }
    return fixedOps;
}
/**
* Return true if this is a LOOKUP constraint and an extra constraint should be available.
* @return true if an extra constraint option is available
*/
public boolean isExtraConstraint() {
    // Extra constraints only make sense for LOOKUP constraints.
    if (!isLookup()) {
        return false;
    }
    String extraValueFieldName = bagQueryConfig.getConnectField();
    ClassDescriptor cld = path.isRootPath()
            ? path.getStartClassDescriptor()
            : path.getLastClassDescriptor();
    // Available only when the end class has the configured connect field.
    return cld.getReferenceDescriptorByName(extraValueFieldName, true) != null;
}
/**
* If a LOOKUP constraint and an extra constraint is available for this path, return a list of
* the possible values for populating a dropdown. Otherwise return null.
* @return a list of possible extra constraint values
*/
public List<Object> getExtraConstraintValues() {
    if (isExtraConstraint()) {
        // NOTE(review): this reads getConstrainField() while
        // isExtraConstraint() checks getConnectField() - presumably the
        // constrain field is the displayed attribute of the extra class;
        // confirm against BagQueryConfig.
        String extraValueFieldName = bagQueryConfig.getConstrainField();
        return oss.getFieldValues(bagQueryConfig.getExtraConstraintClassName(),
                extraValueFieldName);
    }
    return null;
}
/**
* If a LOOKUP constraint and an extra value constraint is available return the classname of
* the extra constraint so it can be displayed. Otherwise return null.
* @return the extra constraint class name or null
*/
public String getExtraConstraintClassName() {
    if (!isExtraConstraint()) {
        return null;
    }
    // Strip the package, leaving just the simple class name for display.
    String[] parts = bagQueryConfig.getExtraConstraintClassName().split("[.]");
    return parts[parts.length - 1];
}
/**
* Return the key fields for this path as a formatted string, for use in LOOKUP help message.
* @return a formatted string listing key fields for this path
*/
public String getKeyFields() {
    // Nothing to list unless this class has key fields configured.
    if (!ClassKeyHelper.hasKeyFields(classKeys, endCls)) {
        return null;
    }
    return StringUtil.prettyList(ClassKeyHelper.getKeyFieldNames(classKeys, endCls), true);
}
/**
* Get a list of public and user bag names available for this path. If none available return
* null.
* @return a list of available bag names or null
*/
public List<String> getBags() {
    // Bags are only usable for classes with key fields configured.
    if (!ClassKeyHelper.hasKeyFields(classKeys, endCls)) {
        return null;
    }
    Map<String, InterMineBag> bags =
        bagManager.getUserOrGlobalBagsOfType(profile, endCls);
    return bags.isEmpty() ? null : new ArrayList<String>(bags.keySet());
}
/**
* Return the valid constraint ops when constraining on a bag.
* @return the possible bag constraint operations
*/
public List<DisplayConstraintOption> getBagOps() {
    List<DisplayConstraintOption> ops = new ArrayList<DisplayConstraintOption>();
    for (ConstraintOp bagOp : BagConstraint.VALID_OPS) {
        ops.add(new DisplayConstraintOption(bagOp.toString(), bagOp.getIndex()));
    }
    return ops;
}
/**
* Returns the bag type that the constraint can be constrained to.
* If there aren't bags return null
*
* @return a String
*/
public String getBagType() {
    // Bags of this path's end class, if any exist for the user.
    return (getBags() == null) ? null : endCls;
}
/**
* Returns the constraint type selected.
*
* @return a String representing the constraint type selected
*/
public String getSelectedConstraint() {
    // Checked in priority order; "attribute" is the default kind.
    if (isBagSelected()) {
        return "bag";
    }
    if (isNullSelected()) {
        return "empty";
    }
    if (isLoopSelected()) {
        return "loopQuery";
    }
    return "attribute";
}
/**
* Returns the set of paths that could feasibly be loop constrained onto the constraint's path,
* given the query's outer join situation. A candidate path must be a class path, of the same
* type, and in the same outer join group.
*
* @return a Set of String paths that could be loop joined
* @throws PathException if something goes wrong
*/
public Set<String> getCandidateLoops() throws PathException {
    // Attributes can never be loop-joined.
    if (path.endIsAttribute()) {
        return Collections.emptySet();
    }
    Set<String> candidates = query.getCandidateLoops(path.getNoConstraintsString());
    if (!(con instanceof PathConstraintLoop)) {
        return candidates;
    }
    // When editing an existing loop constraint, list its current loop path
    // first, then the remaining candidates.
    Set<String> ordered = new LinkedHashSet<String>();
    ordered.add(((PathConstraintLoop) con).getLoopPath());
    ordered.addAll(candidates);
    return ordered;
}
/**
* Return true if the constraint is locked, i.e. it cannot be enabled or disabled.
* @return true if the constraint is locked
*/
public boolean isLocked() {
    // A missing switchOffAbility is treated as locked. Returning the
    // boolean expression directly replaces the if/return-true/return-false
    // pattern.
    return switchOffAbility == null || switchOffAbility == SwitchOffAbility.LOCKED;
}
/**
* Return true if the constraint is enabled, false if it is disabled or locked.
* @return true if the constraint is enabled,false if it is disabled or locked
*/
public boolean isEnabled() {
    // Direct comparison replaces the if/return-true/return-false pattern.
    return switchOffAbility == SwitchOffAbility.ON;
}
/**
* Return true if the constraint is disabled, false if it is enabled or locked.
* @return true if the constraint is disabled,false if it is enabled or locked
*/
public boolean isDisabled() {
    // Direct comparison replaces the if/return-true/return-false pattern.
    return switchOffAbility == SwitchOffAbility.OFF;
}
/**
* Return the value on, off, locked depending on the constraint SwitchOffAbility .
* @return switchable property (on, off, locked)
*/
public String getSwitchable() {
    // equals() is called on the constant so a null switchOffAbility is
    // safe and falls through to "locked", as before.
    if (SwitchOffAbility.ON.equals(switchOffAbility)) {
        return SwitchOffAbility.ON.toString().toLowerCase();
    }
    if (SwitchOffAbility.OFF.equals(switchOffAbility)) {
        return SwitchOffAbility.OFF.toString().toLowerCase();
    }
    return SwitchOffAbility.LOCKED.toString().toLowerCase();
}
/**
* Set the switchOffAbility
* @param switchOffAbility value
*/
public void setSwitchOffAbility(SwitchOffAbility switchOffAbility) {
    // Updates whether the constraint is reported as on, off or locked.
    this.switchOffAbility = switchOffAbility;
}
/**
* Return true if the input field can be displayed, method for use in JSP
* @return true if the input is displayed
*/
public boolean isInputFieldDisplayed() {
    // Fetch once - getPossibleValues() may run a summary lookup and was
    // previously called up to twice per invocation.
    List<Object> possibleValues = getPossibleValues();
    boolean noDropdown = possibleValues == null || possibleValues.isEmpty();
    if (con != null) {
        DisplayConstraintOption selected = getSelectedOp();
        // Guard against a constraint whose operator is null - previously
        // this dereferenced a null getSelectedOp() result.
        if (selected != null) {
            int selectedOperator = selected.getProperty();
            if (selectedOperator == ConstraintOp.MATCHES.getIndex()
                || selectedOperator == ConstraintOp.DOES_NOT_MATCH.getIndex()
                || selectedOperator == ConstraintOp.LOOKUP.getIndex()) {
                return true;
            }
            if (selectedOperator == ConstraintOp.ONE_OF.getIndex()
                || selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
                // ONE OF / NONE OF only use a free input for bag constraints.
                return con instanceof PathConstraintBag;
            }
        }
    }
    // Otherwise: show the input field only when no dropdown is available.
    return noDropdown;
}
/**
* Return true if the drop-down containing the possibleValues can be displayed,
* method for use in JSP
* @return true if the drop-down is displayed
*/
public boolean isPossibleValuesDisplayed() {
    // Fetch once - getPossibleValues() may be expensive.
    List<Object> possibleValues = getPossibleValues();
    boolean hasDropdown = possibleValues != null && !possibleValues.isEmpty();
    if (con != null) {
        DisplayConstraintOption selected = getSelectedOp();
        // Guard against a null operator (previously a potential NPE).
        if (selected != null) {
            int selectedOperator = selected.getProperty();
            if (selectedOperator == ConstraintOp.MATCHES.getIndex()
                || selectedOperator == ConstraintOp.DOES_NOT_MATCH.getIndex()
                || selectedOperator == ConstraintOp.LOOKUP.getIndex()
                || selectedOperator == ConstraintOp.ONE_OF.getIndex()
                || selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
                // These operators never use the single-value dropdown.
                return false;
            }
        }
    }
    return hasDropdown;
}
/**
* Return true if the multi-select containing the possibleValue can be displayed,
* method for use in JSP
* @return true if the multi-select is displayed
*/
public boolean isMultiValuesDisplayed() {
    if (con == null) {
        return false;
    }
    // Guard against a constraint whose operator is null - previously this
    // dereferenced a null getSelectedOp() result.
    DisplayConstraintOption selected = getSelectedOp();
    if (selected == null) {
        return false;
    }
    int selectedOperator = selected.getProperty();
    // Only the multi-value operators use the multi-select widget.
    return selectedOperator == ConstraintOp.ONE_OF.getIndex()
            || selectedOperator == ConstraintOp.NONE_OF.getIndex();
}
/**
* Representation of a constraint operation to populate a dropdown. Label is value to be
* displayed in the dropdown, property is the index of the constraint that will be selected.
* @author Richard Smith
*
*/
public class DisplayConstraintOption
{
    // Both fields are assigned only in the constructor; making them final
    // documents and enforces the immutability.
    private final String label;
    private final Integer property;
    /**
     * Construct with the constraint label and index.
     * @param label the value to be shown in dropdown
     * @param property the constraint index to be added to form on selection
     */
    public DisplayConstraintOption(String label, Integer property) {
        this.label = label;
        this.property = property;
    }
    /**
     * Get the value to be displayed in the dropdown for this operation.
     * @return the display value
     */
    public String getLabel() {
        return label;
    }
    /**
     * Get the constraint index to be put in form when this op is selected.
     * @return the constraint index
     */
    public Integer getProperty() {
        return property;
    }
}
}
|
package org.intermine.web.logic.query;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Collection;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.intermine.api.bag.BagManager;
import org.intermine.api.bag.BagQueryConfig;
import org.intermine.api.config.ClassKeyHelper;
import org.intermine.api.profile.InterMineBag;
import org.intermine.api.profile.Profile;
import org.intermine.api.template.SwitchOffAbility;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.FieldDescriptor;
import org.intermine.metadata.ReferenceDescriptor;
import org.intermine.objectstore.ObjectStoreSummary;
import org.intermine.objectstore.query.BagConstraint;
import org.intermine.objectstore.query.ConstraintOp;
import org.intermine.objectstore.query.SimpleConstraint;
import org.intermine.pathquery.ConstraintValueParser;
import org.intermine.pathquery.Path;
import org.intermine.pathquery.PathConstraint;
import org.intermine.pathquery.PathConstraintAttribute;
import org.intermine.pathquery.PathConstraintBag;
import org.intermine.pathquery.PathConstraintLookup;
import org.intermine.pathquery.PathConstraintLoop;
import org.intermine.pathquery.PathConstraintMultiValue;
import org.intermine.pathquery.PathConstraintNull;
import org.intermine.pathquery.PathConstraintSubclass;
import org.intermine.pathquery.PathException;
import org.intermine.pathquery.PathQuery;
import org.intermine.util.StringUtil;
import org.intermine.web.autocompletion.AutoCompleter;
import org.intermine.web.logic.querybuilder.DisplayPath;
/**
* Representation of a PathQuery constraint for use by JSP pages. This object provides methods
* needed to populate constraint editing boxes and dropdowns, find available bag names, etc. Can
* either represent a new constraint to be added with no values set or an existing constraint that
* is being edited.
*
* Get methods return null if no values are available
*
* @author Richard Smith
*/
public class DisplayConstraint
{
// The path being constrained.
private Path path;
// Lazily built cache for getValidOps().
private List<DisplayConstraintOption> validOps;
// Autocompleter for attribute values; may be null.
private AutoCompleter ac;
// Summary of ObjectStore contents, used to find possible values.
private ObjectStoreSummary oss;
// Simple name of the class at the end of the path.
private String endCls;
// Name of the last path element, or null for a root path.
private String fieldName;
// Extra configuration needed for LOOKUP constraints.
private BagQueryConfig bagQueryConfig;
// Class key (identifier field) configuration, keyed by class name.
private Map<String, List<FieldDescriptor>> classKeys;
// Access to the user's saved bags and global bags.
private BagManager bagManager;
// The user editing the query.
private Profile profile;
// Label associated with this constraint in a template; may be null.
private String constraintLabel;
// Lazily built cache for getFixedOps().
private List<DisplayConstraintOption> fixedOps;
// The existing constraint being edited; null when adding a new one.
private PathConstraint con;
// The query this constraint belongs to.
private PathQuery query;
// Code of this constraint within the query; null for a new constraint.
private String code;
// True if this is a template query constraint that is editable.
private boolean editableInTemplate;
// Whether the constraint is on, off or locked (template queries).
private SwitchOffAbility switchOffAbility;
// Set via setBagSelected(); forces bag-style reporting even for an
// attribute constraint.
private boolean isBagSelected;
// Bag name returned by getSelectedValue() while isBagSelected is set.
private String selectedBagValue;
// Operator returned by getSelectedOp() while isBagSelected is set.
private ConstraintOp selectedBagOp;
// Restricted values computed by the template summariser; may be null.
private List<Object> templateSummary;
/**
* Construct for a new constraint that is being added to a query.
* @param path The path that is being constrained
* @param profile user editing the query, used to fetch available bags
* @param query the PathQuery, in order to provide information on candidate loops
* @param ac auto completer
* @param oss summary data for the ObjectStore contents
* @param bagQueryConfig addition details for needed for LOOKUP constraints
* @param classKeys identifier field config, needed for LOOKUP constraints
* @param bagManager provides access to saved bags
*/
protected DisplayConstraint(Path path, Profile profile, PathQuery query, AutoCompleter ac,
        ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
        Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager) {
    // No existing PathConstraint is passed, so 'con' stays null and this
    // object represents a brand new constraint being added to the query.
    init(path, profile, query, ac, oss, bagQueryConfig, classKeys, bagManager);
}
/**
* Construct for an existing constraint that is being edited.
* @param path The path that is being constrained
* @param con the constraint being edited
* @param label text associated with this constraint, if a template query
* @param code the code of this constraint in the query
* @param editableInTemplate true if this is a template query and this constraint is editable
* @param switchOffAbility whether the constraint is on, off or locked
* @param profile user editing the query, used to fetch available bags
* @param query the PathQuery, in order to provide information on candidate loops
* @param ac auto completer
* @param oss summary data for the ObjectStore contents
* @param bagQueryConfig addition details for needed for LOOKUP constraints
* @param classKeys identifier field config, needed for LOOKUP constraints
* @param bagManager provides access to saved bags
*/
protected DisplayConstraint(Path path, PathConstraint con, String label, String code,
    boolean editableInTemplate, SwitchOffAbility switchOffAbility, Profile profile,
    PathQuery query, AutoCompleter ac,
    ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
    Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager,
    List<Object> templateSummary) {
    // Shared setup first, then the state that only applies when editing an
    // existing constraint (possibly inside a template query).
    init(path, profile, query, ac, oss, bagQueryConfig, classKeys, bagManager);
    this.con = con;
    this.constraintLabel = label;
    this.code = code;
    this.editableInTemplate = editableInTemplate;
    this.switchOffAbility = switchOffAbility;
    this.templateSummary = templateSummary;
}
// Common initialisation shared by both constructors.
private void init(Path path, Profile profile, PathQuery query, AutoCompleter ac,
    ObjectStoreSummary oss, BagQueryConfig bagQueryConfig,
    Map<String, List<FieldDescriptor>> classKeys, BagManager bagManager) {
    this.path = path;
    this.ac = ac;
    this.oss = oss;
    // Pre-compute the end class and field name - they are used repeatedly
    // when building titles, operator lists and possible values.
    this.endCls = getEndClass(path);
    this.fieldName = getFieldName(path);
    this.bagQueryConfig = bagQueryConfig;
    this.classKeys = classKeys;
    this.profile = profile;
    this.query = query;
    this.bagManager = bagManager;
    this.isBagSelected = false;
}
// Simple name of the class the path ends on; a root path has no "last"
// class descriptor, so fall back to the start class.
private String getEndClass(Path path) {
    ClassDescriptor cld = path.isRootPath()
            ? path.getStartClassDescriptor()
            : path.getLastClassDescriptor();
    return cld.getType().getSimpleName();
}
// Last element of the path, or null when the path is just a root class.
private String getFieldName(Path path) {
    return path.isRootPath() ? null : path.getLastElement();
}
// TODO this should be in some common code
/**
 * Return the single string value carried by the given constraint: the
 * attribute value, bag name, LOOKUP value, subclass type or loop path.
 * For a null constraint the operator name itself is returned. Returns
 * null for constraint types with no single value (e.g. multi-value).
 */
private String constraintStringValue(PathConstraint con) {
    if (con instanceof PathConstraintAttribute) {
        return ((PathConstraintAttribute) con).getValue();
    } else if (con instanceof PathConstraintBag) {
        return ((PathConstraintBag) con).getBag();
    } else if (con instanceof PathConstraintLookup) {
        return ((PathConstraintLookup) con).getValue();
    } else if (con instanceof PathConstraintSubclass) {
        return ((PathConstraintSubclass) con).getType();
    } else if (con instanceof PathConstraintLoop) {
        return ((PathConstraintLoop) con).getLoopPath();
    } else if (con instanceof PathConstraintNull) {
        return ((PathConstraintNull) con).getOp().toString();
    }
    return null;
}
/**
* If editing an existing constraint get the code for this constraint in the query, return null
* if creating a new constraint.
* @return the constraint code or null
*/
public String getCode() {
    // Null when this object represents a new, not-yet-added constraint.
    return code;
}
/**
* Return true if editing an existing template constraint and that constraint is editable.
* @return true if an editable template constraint, false otherwise
*/
public boolean isEditableInTemplate() {
return editableInTemplate;
}
    /**
     * Get a representation of the path that is being constrained. DisplayPath provides
     * convenience methods for use in JSP.
     * @return the path being constrained
     */
    public DisplayPath getPath() {
        return new DisplayPath(path);
    }
    /**
     * If editing an existing constraint, return the selected value, otherwise null.
     * For an attribute constraint this is the value the user entered; for a bag
     * constraint, the selected bag name, etc. If a bag has been explicitly selected
     * via setBagSelected/setSelectedBagValue, the bag value set there takes precedence
     * over the value in the underlying constraint.
     * @return the selected value or null
     */
    public String getSelectedValue() {
        // explicit bag selection (set by the controller) wins over the stored constraint
        if (isBagSelected) {
            return selectedBagValue;
        }
        if (con != null) {
            return constraintStringValue(con);
        }
        return null;
    }
/**
* Returns the value collection if the constraint is a multivalue, otherwise return null.
*
* @return a Collection of Strings
*/
public Collection<String> getMultiValues() {
if (isMultiValueSelected()) {
return ((PathConstraintMultiValue) con).getValues();
}
return null;
}
/**
* If the constraint is a multivalue, returns the value collection
* represented as string separated by ',', otherwise return an empty String.
*
* @return a String representing the multivalues of constraint
*/
public String getMultiValuesAsString() {
String multiValuesAsString = "";
if (getMultiValues() != null) {
for (String value : getMultiValues()) {
multiValuesAsString += value + ",";
}
}
return multiValuesAsString;
}
/**
* Return true if editing an existing constraint and a bag has been selected.
* @return true if a bag has been selected
*/
public boolean isBagSelected() {
if (isBagSelected) {
return isBagSelected;
} else {
return (con != null && con instanceof PathConstraintBag);
}
}
    /**
     * Mark the bag as selected. After this call isBagSelected() returns true even when
     * the underlying constraint is an attribute constraint.
     * @param isBagSelected true if a bag has been selected
     */
    public void setBagSelected(boolean isBagSelected) {
        this.isBagSelected = isBagSelected;
    }
/**
* Return true if editing an existing constraint and 'has a value' or 'has no value' has been
* selected.
* @return true if a null constraint was selected
*/
public boolean isNullSelected() {
return (con != null && con instanceof PathConstraintNull);
}
/**
* Return true if editing an existing having the attribute type boolean or Boolean
* @return true if the type is the primitive boolean or the object java.lang.Boolean
*/
public boolean isBoolean() {
String type = getPath().getType();
return ("boolean".equals(type) || "Boolean".equals(type));
}
/**
* Return true if editing an existing constraint and an attribute value or LOOKUP constraint
* was selected.
* @return true if an attribute/LOOKUP constraint was selected
*/
public boolean isValueSelected() {
if (con != null) {
return !(isBagSelected() || isNullSelected() || isLoopSelected());
}
return false;
}
/**
* Return true if editing an existing constraint and a loop value has been
* selected.
* @return true if a loop constraint was selected
*/
public boolean isLoopSelected() {
return (con != null && con instanceof PathConstraintLoop);
}
/**
* Return true if editing an existing constraint and a multivalue has been
* selected.
* @return true if a multivalue constraint was selected
*/
public boolean isMultiValueSelected() {
return (con != null && con instanceof PathConstraintMultiValue);
}
    /**
     * Return the last class in the path plus the field name (if any) as the title for
     * this constraint, e.g. "Gene symbol".
     * @return the title of this constraint
     */
    public String getTitle() {
        return endCls + (fieldName == null ? "" : " " + fieldName);
    }
    /**
     * Return the label associated with this constraint when editing a template query
     * constraint; may be null otherwise.
     * @return the constraint label
     */
    public String getDescription() {
        return constraintLabel;
    }
    /**
     * Return a help message to display alongside the constraint. The message is derived
     * from the constraint type, e.g. listing the key fields for LOOKUP constraints and
     * explaining the use of wildcards. Returns null when there is no appropriate help.
     * @return the help message or null
     */
    public String getHelpMessage() {
        return DisplayConstraintHelpMessages.getHelpMessage(this);
    }
    /**
     * If the bag is selected, return the op set with setSelectedBagOp; otherwise, when
     * editing an existing constraint, return the operation it uses. Returns null when
     * neither applies (e.g. creating a new constraint, or the constraint has no op).
     * @return the selected constraint op or null
     */
    public DisplayConstraintOption getSelectedOp() {
        // explicit bag selection (set by the controller) takes precedence
        if (isBagSelected) {
            return new DisplayConstraintOption(selectedBagOp.toString(),
                selectedBagOp.getIndex());
        }
        if (con != null) {
            ConstraintOp selectedOp = con.getOp();
            if (selectedOp != null) {
                return new DisplayConstraintOption(selectedOp.toString(), selectedOp.getIndex());
            }
        }
        return null;
    }
    /**
     * Set the selectedBagOp, returned by getSelectedOp() when the bag is selected.
     * @param selectedBagOp the constraint op to use while a bag is selected
     */
    public void setSelectedBagOp(ConstraintOp selectedBagOp) {
        this.selectedBagOp = selectedBagOp;
    }
    /**
     * Set the selectedBagValue, returned by getSelectedValue() when the bag is selected.
     * @param selectedBagValue the bag value to report while a bag is selected
     */
    public void setSelectedBagValue(String selectedBagValue) {
        this.selectedBagValue = selectedBagValue;
    }
/**
* If editing an existing LOOKUP constraint return the value selected for the extra constraint
* field. Otherwise return null
* @return the LOOKUP constraint extra value or null
*/
public String getSelectedExtraValue() {
if (con instanceof PathConstraintLookup) {
return ((PathConstraintLookup) con).getExtraValue();
}
return null;
}
/**
* Given the path being constrained return the valid constraint operations. If constraining an
* attribute the valid ops depend on the type being constraint - String, Integer, Boolean, etc.
* @return the valid constraint operations
*/
public List<DisplayConstraintOption> getValidOps() {
if (validOps != null) {
return validOps;
}
validOps = new ArrayList<DisplayConstraintOption>();
if (con instanceof PathConstraintBag) {
for (ConstraintOp op : PathConstraintBag.VALID_OPS) {
validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
}
} else if (con instanceof PathConstraintSubclass) {
return validOps;
} else if (con instanceof PathConstraintLoop) {
List<DisplayConstraintOption> loopQueryOps = getLoopQueryOps();
for (DisplayConstraintOption dco : loopQueryOps) {
validOps.add(dco);
}
} else if (path.endIsAttribute()) {
List<ConstraintOp> allOps = SimpleConstraint.validOps(path.getEndType());
// TODO This was in the constraint jsp:
// <c:if test="${!(editingNode.type == 'String' && (op.value == '<='
//|| op.value == '>='))}">
// TODO this should show different options if a dropdown is to be used
boolean existPossibleValues =
(getPossibleValues() != null && getPossibleValues().size() > 0) ? true : false;
for (ConstraintOp op : allOps) {
if (!existPossibleValues && (op.getIndex() == 6 || op.getIndex() == 7)) {
continue;
}
validOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
}
if (existPossibleValues) {
for (ConstraintOp op : PathConstraintMultiValue.VALID_OPS) {
validOps.add(new DisplayConstraintOption(op.toString(),
op.getIndex()));
}
}
} else if (isLookup()) {
// this must be a LOOKUP constraint
ConstraintOp lookup = ConstraintOp.LOOKUP;
validOps.add(new DisplayConstraintOption(lookup.toString(), lookup.getIndex()));
}
return validOps;
}
    /**
     * Returns the set of operators valid for loop constraints: equals and not-equals.
     *
     * @return a List of DisplayConstraintOption objects
     */
    public List<DisplayConstraintOption> getLoopQueryOps() {
        return Arrays.asList(new DisplayConstraintOption(ConstraintOp.EQUALS.toString(),
                    ConstraintOp.EQUALS.getIndex()),
                new DisplayConstraintOption(ConstraintOp.NOT_EQUALS.toString(),
                    ConstraintOp.NOT_EQUALS.getIndex()));
    }
    /**
     * Return true if this constraint should be a LOOKUP: true when constraining a class
     * (reference/collection) rather than an attribute and that class has key fields
     * defined.
     * @return true if this constraint should be a LOOKUP
     */
    public boolean isLookup() {
        return !path.endIsAttribute() && ClassKeyHelper.hasKeyFields(classKeys, endCls);
    }
/**
* Return the LOOKUP constraint op.
* @return the LOOKUP constraint op
*/
// TOOO do we need this? validOps should contain correct value
public DisplayConstraintOption getLookupOp() {
ConstraintOp lookup = ConstraintOp.LOOKUP;
return new DisplayConstraintOption(lookup.toString(), lookup.getIndex());
}
/**
* Return the autocompleter for this path if one is available. Otherwise return null.
* @return an autocompleter for this path or null
*/
public AutoCompleter getAutoCompleter() {
if (ac != null && ac.hasAutocompleter(endCls, fieldName)) {
return ac;
}
return null;
}
    /**
     * Values to populate a dropdown for the path, if possible values are available.
     * Template summary values (pre-computed restricted sets) take precedence; otherwise
     * values come from the ObjectStoreSummary. Date values are reformatted for display.
     * @return possible values to populate a dropdown
     */
    public List<Object> getPossibleValues() {
        String className = "";
        if (path.isRootPath()) {
            className = path.getStartClassDescriptor().getType().getCanonicalName();
        } else {
            className = path.getLastClassDescriptor().getType().getCanonicalName();
        }
        // if this is a template, it may have been summarised so we have a restricted set if values
        // for particular paths (the TemplateSummariser runs queries to work out exact values
        // constraints could take given the other constraints in the query.
        if (templateSummary != null && !templateSummary.isEmpty()) {
            return templateSummary;
        }
        // otherwise, we may have possible values from the ObjectStoreSummary
        List<Object> fieldValues = oss.getFieldValues(className, fieldName);
        if (path.endIsAttribute()) {
            Class<?> type = path.getEndType();
            if (Date.class.equals(type)) {
                // reformat raw date strings from the summary for display
                List<Object> fieldValueFormatted = new ArrayList<Object>();
                for (Object obj : fieldValues) {
                    fieldValueFormatted.add(ConstraintValueParser.format((String) obj));
                }
                return fieldValueFormatted;
            }
        }
        return fieldValues;
    }
    /**
     * If a dropdown is available for a constraint fewer operations are possible; return
     * that reduced list (computed once and cached in the fixedOps field). Returns null
     * when no possible values are available for this path.
     * @return the constraint ops available when selecting values from a dropdown, or null
     */
    // TODO Do we need this, could getValildOps return the correct ops if a dropdown is available
    public List<DisplayConstraintOption> getFixedOps() {
        if (fixedOps != null) {
            return fixedOps;
        }
        // fixedOps deliberately stays null when there are no possible values
        if (getPossibleValues() != null) {
            fixedOps = new ArrayList<DisplayConstraintOption>();
            for (ConstraintOp op : SimpleConstraint.fixedEnumOps(path.getEndType())) {
                fixedOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
            }
        }
        return fixedOps;
    }
/**
* Return true if this is a LOOKUP constraint and an extra constraint should be available.
* @return true if an extra constraint option is available
*/
public boolean isExtraConstraint() {
if (isLookup()) {
String extraValueFieldName = bagQueryConfig.getConnectField();
ClassDescriptor cld = (path.isRootPath()) ? path.getStartClassDescriptor()
: path.getLastClassDescriptor();
ReferenceDescriptor fd = cld.getReferenceDescriptorByName(extraValueFieldName, true);
return fd != null;
} else {
return false;
}
}
    /**
     * If a LOOKUP constraint and an extra constraint is available for this path, return
     * a list of the possible values for populating a dropdown. Otherwise return null.
     * NOTE(review): this uses bagQueryConfig.getConstrainField() whereas
     * isExtraConstraint() checks getConnectField() - confirm the asymmetry is intended.
     * @return a list of possible extra constraint values
     */
    public List<Object> getExtraConstraintValues() {
        if (isExtraConstraint()) {
            String extraValueFieldName = bagQueryConfig.getConstrainField();
            return oss.getFieldValues(bagQueryConfig.getExtraConstraintClassName(),
                    extraValueFieldName);
        }
        return null;
    }
/**
* If a LOOKUP constraint and an extra value constraint is available return the classname of
* the extra constraint so it can be displayed. Otherwise return null.
* @return the extra constraint class name or null
*/
public String getExtraConstraintClassName() {
if (isExtraConstraint()) {
String[] splitClassName = bagQueryConfig.getExtraConstraintClassName().split("[.]");
return splitClassName[splitClassName.length - 1];
//return bagQueryConfig.getExtraConstraintClassName();
}
return null;
}
    /**
     * Return the key fields for this path as a formatted string, for use in the LOOKUP
     * help message. Returns null when the end class has no key fields.
     * @return a formatted string listing key fields for this path, or null
     */
    public String getKeyFields() {
        if (ClassKeyHelper.hasKeyFields(classKeys, endCls)) {
            return StringUtil.prettyList(ClassKeyHelper.getKeyFieldNames(classKeys, endCls), true);
        }
        return null;
    }
    /**
     * Get a list of public and user bag names available for this path. If none are
     * available (no key fields on the end class, or no matching bags) return null.
     * @return a list of available bag names or null
     */
    public List<String> getBags() {
        // bags can only be used on classes with key fields
        if (ClassKeyHelper.hasKeyFields(classKeys, endCls)) {
            Map<String, InterMineBag> bags =
                bagManager.getUserOrGlobalBagsOfType(profile, endCls);
            if (!bags.isEmpty()) {
                return new ArrayList<String>(bags.keySet());
            }
        }
        return null;
    }
/**
* Return the valid constraint ops when constraining on a bag.
* @return the possible bag constraint operations
*/
public List<DisplayConstraintOption> getBagOps() {
List<DisplayConstraintOption> bagOps = new ArrayList<DisplayConstraintOption>();
for (ConstraintOp op : BagConstraint.VALID_OPS) {
bagOps.add(new DisplayConstraintOption(op.toString(), op.getIndex()));
}
return bagOps;
}
/**
* Returns the bag type that the constraint can be constrained to.
* If there aren't bags return null
*
* @return a String
*/
public String getBagType() {
if (getBags() != null) {
return endCls;
} else {
return null;
}
}
    /**
     * Returns the constraint type selected: "bag", "empty" (null constraint),
     * "loopQuery", or the default "attribute".
     *
     * @return a String representing the constraint type selected
     */
    public String getSelectedConstraint() {
        if (isBagSelected()) {
            return "bag";
        } else if (isNullSelected()) {
            return "empty";
        } else if (isLoopSelected()) {
            return "loopQuery";
        }
        // attribute is the fall-through default
        return "attribute";
    }
    /**
     * Returns the set of paths that could feasibly be loop constrained onto the
     * constraint's path, given the query's outer join situation. A candidate path must
     * be a class path, of the same type, and in the same outer join group. When editing
     * an existing loop constraint, its current loop path is included first.
     *
     * @return a Set of String paths that could be loop joined
     * @throws PathException if something goes wrong
     */
    public Set<String> getCandidateLoops() throws PathException {
        // attributes can never be loop-joined
        if (path.endIsAttribute()) {
            return Collections.emptySet();
        } else {
            if (con instanceof PathConstraintLoop) {
                // keep the currently selected loop path at the front of the set
                Set<String> retval = new LinkedHashSet<String>();
                retval.add(((PathConstraintLoop) con).getLoopPath());
                retval.addAll(query.getCandidateLoops(path.getNoConstraintsString()));
                return retval;
            } else {
                return query.getCandidateLoops(path.getNoConstraintsString());
            }
        }
    }
/**
* Return true if the constraint is locked, it should'n be enabled or disabled.
* @return true if the constraint is locked
*/
public boolean isLocked() {
if (switchOffAbility == null || switchOffAbility == SwitchOffAbility.LOCKED) {
return true;
}
return false;
}
/**
* Return true if the constraint is enabled, false if it is disabled or locked.
* @return true if the constraint is enabled,false if it is disabled or locked
*/
public boolean isEnabled() {
if (switchOffAbility == SwitchOffAbility.ON) {
return true;
}
return false;
}
/**
* Return true if the constraint is disabled, false if it is enabled or locked.
* @return true if the constraint is disabled,false if it is enabled or locked
*/
public boolean isDisabled() {
if (switchOffAbility == SwitchOffAbility.OFF) {
return true;
}
return false;
}
    /**
     * Return the value "on", "off" or "locked" depending on the constraint's
     * SwitchOffAbility. Anything other than ON/OFF (including null) reports "locked",
     * consistent with isLocked().
     * @return switchable property (on, off, locked)
     */
    public String getSwitchable() {
        if (SwitchOffAbility.ON.equals(switchOffAbility)) {
            return SwitchOffAbility.ON.toString().toLowerCase();
        } else if (SwitchOffAbility.OFF.equals(switchOffAbility)) {
            return SwitchOffAbility.OFF.toString().toLowerCase();
        } else {
            return SwitchOffAbility.LOCKED.toString().toLowerCase();
        }
    }
    /**
     * Set the switchOffAbility, controlling isLocked/isEnabled/isDisabled.
     * @param switchOffAbility the new value
     */
    public void setSwitchOffAbility(SwitchOffAbility switchOffAbility) {
        this.switchOffAbility = switchOffAbility;
    }
    /**
     * Return true if the free-text input field can be displayed; method for use in JSP.
     * The input is shown for pattern/LOOKUP operators, for ONE_OF/NONE_OF only on bag
     * constraints, and otherwise whenever no dropdown of possible values exists.
     * NOTE(review): the possible-values fallback is duplicated for the con == null case;
     * kept as-is to preserve the exact JSP contract.
     * @return true if the input is displayed
     */
    public boolean isInputFieldDisplayed() {
        if (con != null) {
            int selectedOperator = getSelectedOp().getProperty();
            // pattern matching and LOOKUP always need free-text input
            if (selectedOperator == ConstraintOp.MATCHES.getIndex()
                || selectedOperator == ConstraintOp.DOES_NOT_MATCH.getIndex()
                || selectedOperator == ConstraintOp.LOOKUP.getIndex()) {
                return true;
            }
            // ONE_OF/NONE_OF use the multi-select, except for bag constraints
            if (selectedOperator == ConstraintOp.ONE_OF.getIndex()
                || selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
                if (con instanceof PathConstraintBag) {
                    return true;
                }
                return false;
            }
            // a dropdown of possible values replaces the input field
            if (getPossibleValues() != null && getPossibleValues().size() > 0) {
                return false;
            }
            return true;
        }
        if (getPossibleValues() != null && getPossibleValues().size() > 0) {
            return false;
        }
        return true;
    }
    /**
     * Return true if the drop-down containing the possibleValues can be displayed;
     * method for use in JSP. The dropdown is hidden for pattern, LOOKUP and
     * multi-value operators, and shown only when possible values exist.
     * @return true if the drop-down is displayed
     */
    public boolean isPossibleValuesDisplayed() {
        if (con != null) {
            int selectedOperator = getSelectedOp().getProperty();
            // these operators use the input field or multi-select instead
            if (selectedOperator == ConstraintOp.MATCHES.getIndex()
                || selectedOperator == ConstraintOp.DOES_NOT_MATCH.getIndex()
                || selectedOperator == ConstraintOp.LOOKUP.getIndex()
                || selectedOperator == ConstraintOp.ONE_OF.getIndex()
                || selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
                return false;
            }
            if (getPossibleValues() != null && getPossibleValues().size() > 0) {
                return true;
            }
            return false;
        }
        if (getPossibleValues() != null && getPossibleValues().size() > 0) {
            return true;
        }
        return false;
    }
/**
* Return true if the multi-select containing the possibleValue can be displayed,
* method for use in JSP
* @return true if the multi-select is displayed
*/
public boolean isMultiValuesDisplayed() {
if (con != null) {
int selectedOperator = getSelectedOp().getProperty();
if (selectedOperator == ConstraintOp.ONE_OF.getIndex()
|| selectedOperator == ConstraintOp.NONE_OF.getIndex()) {
return true;
}
return false;
} return false;
}
/**
* Representation of a constraint operation to populate a dropdown. Label is value to be
* displayed in the dropdown, property is the index of the constraint that will be selected.
* @author Richard Smith
*
*/
public class DisplayConstraintOption
{
private String label;
private Integer property;
/**
* Construct with the constraint lable and index
* @param label the value to be shown in dropdown
* @param property the constraint index to be added to form on selection
*/
public DisplayConstraintOption(String label, Integer property) {
this.label = label;
this.property = property;
}
/**
* Get the value to be displayed in the dropdown for this operation.
* @return the display value
*/
public String getLabel() {
return label;
}
/**
* Get the constraint index to be put in form when this op is selected.
* @return the constraint index
*/
public Integer getProperty() {
return property;
}
}
}
|
package org.intermine.web.struts;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.intermine.metadata.ClassDescriptor;
import org.intermine.metadata.Model;
import org.intermine.objectstore.ObjectStore;
import org.intermine.objectstore.query.BagConstraint;
import org.intermine.path.Path;
import org.intermine.path.PathError;
import org.intermine.util.TypeUtil;
import org.intermine.web.logic.Constants;
import org.intermine.web.logic.config.FieldConfig;
import org.intermine.web.logic.config.FieldConfigHelper;
import org.intermine.web.logic.config.WebConfig;
import org.intermine.web.logic.query.Constraint;
import org.intermine.web.logic.query.MainHelper;
import org.intermine.web.logic.query.MetadataNode;
import org.intermine.web.logic.query.PathNode;
import org.intermine.web.logic.query.PathQuery;
import org.intermine.web.logic.session.SessionMethods;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.tiles.ComponentContext;
import org.apache.struts.tiles.actions.TilesAction;
/**
* Controller for the main query builder tile. Generally, request attributes that are required by
* multiple tiles on the query builder are synthesized here.
*
* @author Mark Woodbridge
* @author Thomas Riley
* @see org.intermine.web.struts.QueryBuilderConstraintController
* @see org.intermine.web.struts.QueryBuilderPathsController
*/
public class QueryBuilderController extends TilesAction
{
    /**
     * Populates the request with everything the query builder tile needs, then returns
     * null so Tiles renders the current tile (no forward).
     * @see TilesAction#execute
     */
    public ActionForward execute(ComponentContext context, ActionMapping mapping, ActionForm form,
                                 HttpServletRequest request, HttpServletResponse response)
        throws Exception {
        populateRequest(request, response);
        return null;
    }
    /**
     * Populate the request with the necessary attributes to render the query builder
     * page. This method is static so that it can be called from the AJAX actions in
     * MainChange.java.
     *
     * @param request the current request
     * @param response the current response
     * @see QueryBuilderChange
     */
    public static void populateRequest(HttpServletRequest request, HttpServletResponse response) {
        HttpSession session = request.getSession();
        ServletContext servletContext = session.getServletContext();
        ObjectStore os = (ObjectStore) servletContext.getAttribute(Constants.OBJECTSTORE);
        Model model = os.getModel();
        PathQuery query = (PathQuery) session.getAttribute(Constants.QUERY);
        // constraint display values
        request.setAttribute("lockedPaths", listToMap(findLockedPaths(query)));
        List<Path> pathView = SessionMethods.getEditingView(session);
        // sort order
        List<Path> sortOrder = SessionMethods.getEditingSortOrder(session);
        List<String> sortOrderStrings = new ArrayList<String>();
        if (sortOrder != null) {
            for (Path sortOrderString: sortOrder) {
                sortOrderStrings.add(sortOrderString.toStringNoConstraints());
            }
        }
        Integer sortByIndex = new Integer(0); // sort-by-field's index in the select list
        // select list
        List<String> viewStrings = new ArrayList<String>();
        for (Path viewPath: pathView) {
            String viewPathString = viewPath.toStringNoConstraints();
            viewStrings.add(viewPathString);
            // remember the position of the (last) view path that is part of the sort order
            if (sortOrderStrings.contains(viewPathString)) {
                sortByIndex = new Integer(pathView.indexOf(viewPath));
            }
        }
        request.setAttribute("viewStrings", viewStrings);
        request.setAttribute("sortByIndex", sortByIndex);
        /* if sortOrderStrings are empty (probably a template), add first item in select */
        if (sortOrderStrings.isEmpty() && !viewStrings.isEmpty()) {
            sortOrderStrings.add(viewStrings.get(0));
        }
        request.setAttribute("sortOrderStrings", sortOrderStrings);
        request.setAttribute("sortOrderPaths", listToMap(sortOrderStrings));
        //request.setAttribute("sortOrderPathOrder", createIndexMap(sortOrderStrings));
        //request.setAttribute("sortOrderPathTypes", getPathTypes(sortOrderStrings, query));
        // collect paths mentioned in the query's recorded problems so they can be highlighted
        List<String> errorPaths = new ArrayList<String>();
        Throwable[] messages = query.getProblems();
        for (Throwable thr: messages) {
            if (thr instanceof PathError) {
                errorPaths.add(((PathError) thr).getPathString());
            }
        }
        request.setAttribute("errorPaths", errorPaths);
        request.setAttribute("viewPaths", listToMap(viewStrings));
        request.setAttribute("viewPathOrder", createIndexMap(viewStrings));
        //request.setAttribute("viewPathTypes", getPathTypes(viewStrings, query));
        // set up the metadata
        WebConfig webConfig = (WebConfig) servletContext.getAttribute(Constants.WEBCONFIG);
        boolean isSuperUser;
        Boolean superUserAttribute = (Boolean) session.getAttribute(Constants.IS_SUPERUSER);
        if (superUserAttribute != null && superUserAttribute.equals(Boolean.TRUE)) {
            isSuperUser = true;
        } else {
            isSuperUser = false;
        }
        String prefix = (String) session.getAttribute("prefix");
        Collection nodes =
            MainHelper.makeNodes((String) session.getAttribute("path"), model, isSuperUser);
        for (Iterator iter = nodes.iterator(); iter.hasNext();) {
            MetadataNode node = (MetadataNode) iter.next();
            // Update view nodes
            String pathName = node.getPathString();
            int firstDot = pathName.indexOf('.');
            String fullPath;
            if (firstDot == -1) {
                fullPath = prefix;
            } else {
                String pathNameWithoutClass = pathName.substring(firstDot + 1);
                fullPath = prefix + "." + pathNameWithoutClass;
            }
            if (viewStrings.contains(fullPath)) {
                node.setSelected(true);
            } else {
                Path path = new Path(model, pathName);
                // If an object has been selected, select its fields instead
                if (path.getEndFieldDescriptor() == null || path.endIsReference()
                    || path.endIsCollection()) {
                    // NOTE(review): viewStrings is a List<String> but 'path' is a Path,
                    // so this contains() is always false - should this be
                    // viewStrings.contains(pathName)? Verify intent before changing.
                    if (viewStrings.contains(path)) {
                        ClassDescriptor cld = path.getEndClassDescriptor();
                        List cldFieldConfigs =
                            FieldConfigHelper.getClassFieldConfigs(webConfig, cld);
                        Iterator cldFieldConfigIter = cldFieldConfigs.iterator();
                        while (cldFieldConfigIter.hasNext()) {
                            FieldConfig fc = (FieldConfig) cldFieldConfigIter.next();
                            String pathFromField = pathName + "." + fc.getFieldExpr();
                            // the last field config wins: selected state is overwritten each pass
                            if (viewStrings.contains(pathFromField)) {
                                node.setSelected(true);
                            } else {
                                node.setSelected(false);
                            }
                        }
                    }
                }
            }
        }
        request.setAttribute("nodes", nodes);
        Map<String, String> prefixes = getViewPathLinkPaths(query);
        request.setAttribute("viewPathLinkPrefixes", prefixes);
        request.setAttribute("viewPathLinkPaths", getPathTypes(prefixes.values(), query));
        // set up the navigation links (eg. Department > employees > department)
        String current = null;
        Map<String, String> navigation = new LinkedHashMap<String, String>();
        Map<String, String> navigationPaths = new LinkedHashMap<String, String>();
        if (prefix != null && prefix.indexOf(".") != -1) {
            // build one breadcrumb entry per path segment, each mapped to its full prefix
            for (StringTokenizer st = new StringTokenizer(prefix, "."); st.hasMoreTokens();) {
                String token = st.nextToken();
                current = (current == null ? token : current + "." + token);
                navigation.put(token, current);
                navigationPaths.put(token, TypeUtil.unqualifiedName(MainHelper
                            .getTypeForPath(current, query)));
            }
        }
        request.setAttribute("navigation", navigation);
        request.setAttribute("navigationPaths", navigationPaths);
    }
/**
* Given a input List, return a Map from list element value to list index.
*
* @param list
* a List
* @return Map from list element values to list index Integer
*/
protected static Map createIndexMap(List list) {
HashMap map = new HashMap();
for (int i = 0; i < list.size(); i++) {
map.put(list.get(i), new Integer(i));
}
return map;
}
/**
* Get a list of paths that should not be removed from the query by the user. This is usually
* because they are involved in a loop query constraint.
*
* @param pathquery
* the PathQuery containing the paths
* @return list of paths (as Strings) that cannot be removed by the user
*/
protected static List findLockedPaths(PathQuery pathquery) {
ArrayList paths = new ArrayList();
Iterator iter = pathquery.getNodes().values().iterator();
while (iter.hasNext()) {
PathNode node = (PathNode) iter.next();
Iterator citer = node.getConstraints().iterator();
while (citer.hasNext()) {
Constraint con = (Constraint) citer.next();
if (!node.isAttribute() && !BagConstraint.VALID_OPS.contains(con.getOp())) {
// loop query constraint
// get path and superpaths
String path = (String) con.getValue();
while (path != null) {
paths.add(path);
if (path.indexOf('.') != -1) {
path = path.substring(0, path.lastIndexOf('.'));
} else {
path = null;
}
}
}
}
}
return paths;
}
/**
* Return a Map from path to unqualified type name.
*
* @param paths
* collection of paths
* @param pathquery
* related PathQuery
* @return Map from path to type
*/
protected static Map getPathTypes(Collection paths, PathQuery pathquery) {
Map<String, String> viewPathTypes = new HashMap<String, String>();
Iterator iter = paths.iterator();
while (iter.hasNext()) {
String path = (String) iter.next();
String unqualifiedName = TypeUtil.unqualifiedName(MainHelper.getTypeForPath(path,
pathquery));
viewPathTypes.put(path, unqualifiedName);
}
return viewPathTypes;
}
/**
* Return a Map from path to path/subpath pointing to the nearest not attribute for each path on
* the select list. practise this results in the same path or the path with an attribute name
* chopped off the end.
*
* @param pathquery
* the path query
* @return mapping from select list path to non-attribute path
*/
protected static Map<String, String> getViewPathLinkPaths(PathQuery pathquery) {
Map<String, String> linkPaths = new HashMap<String, String>();
Iterator<Path> iter = pathquery.getView().iterator();
while (iter.hasNext()) {
Path path = iter.next();
String pathString = path.toStringNoConstraints();
if (path.endIsAttribute()) {
linkPaths.put(pathString, pathString.substring(0, pathString.lastIndexOf(".")));
} else {
linkPaths.put(pathString, pathString);
}
}
return linkPaths;
}
/**
* Returns a map where every item in <code>list</code> maps to Boolean TRUE.
*
* @param list
* the list of map keys
* @return a map that maps every item in list to Boolean.TRUE
*/
protected static Map listToMap(List list) {
Map map = new HashMap();
int n = list.size();
for (int i = 0; i < n; i++) {
map.put(list.get(i), Boolean.TRUE);
}
return map;
}
}
|
package net.anyflow.network.avro;
import java.net.InetSocketAddress;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import net.anyflow.network.Configurator;
import net.anyflow.network.exception.DefaultException;
import org.apache.avro.ipc.NettyServer;
import org.apache.avro.ipc.Responder;
import org.apache.avro.ipc.Server;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.execution.ExecutionHandler;
import org.jboss.netty.handler.execution.OrderedMemoryAwareThreadPoolExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class AvroServer {

    private static final Logger logger = LoggerFactory.getLogger(AvroServer.class);

    /** Lazily created singleton; all access is guarded by class-level synchronization. */
    private static AvroServer instance;

    private Server server;

    private AvroServer() {
    }

    /**
     * Starts the Avro server on the port taken from the configuration.
     *
     * @param responder handles incoming Avro RPC calls
     * @throws DefaultException if thrown while reading the configured port
     */
    public static synchronized void start(Responder responder) throws DefaultException {
        start(responder, Configurator.getAvroPort());
    }

    /**
     * Starts the Avro server on the given port. Synchronized to avoid the race on the
     * lazy singleton initialisation the original code had.
     * NOTE(review): calling start twice replaces the server reference without closing
     * the previous one - confirm whether restart support is needed.
     *
     * @param responder handles incoming Avro RPC calls
     * @param port TCP port to listen on
     */
    public static synchronized void start(Responder responder, int port) {
        if (instance == null) {
            instance = new AvroServer();
        }
        ExecutorService executor = Executors.newCachedThreadPool();
        instance.server = new NettyServer(responder
                , new InetSocketAddress(port)
                , new NioServerSocketChannelFactory(executor, executor)
                , new ExecutionHandler(new OrderedMemoryAwareThreadPoolExecutor(16, 1048576, 1048576)));
        logger.info("Avro server started.");
    }

    /**
     * Closes the server if it was started; a no-op otherwise.
     */
    public static synchronized void stop() {
        if (instance == null) {
            return;
        }
        instance.server.close();
    }

    /**
     * Returns the port the server is listening on, or -1 if the server was never started.
     *
     * @return the listening port, or -1
     */
    public static synchronized int getPort() {
        if (instance == null) {
            return -1;
        }
        return instance.server.getPort();
    }
}
|
package net.ssehub.kernel_haven.util;
import static net.ssehub.kernel_haven.util.null_checks.NullHelpers.notNull;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import net.ssehub.kernel_haven.SetUpException;
import net.ssehub.kernel_haven.config.Configuration;
import net.ssehub.kernel_haven.config.DefaultSettings;
import net.ssehub.kernel_haven.util.Util.Color;
import net.ssehub.kernel_haven.util.null_checks.NonNull;
import net.ssehub.kernel_haven.util.null_checks.Nullable;
/**
* A thread-safe singleton logger.
*
* @author Adam
* @author alice
* @author moritz
*/
public final class Logger {
    /**
     * The available log levels, ordered from least to most verbose. Each level carries a
     * fixed-width plain string and an ANSI-colored variant used when the target supports
     * color; the padding keeps log columns aligned across levels.
     */
    public static enum Level {

        /**
         * No messages are logged.
         */
        NONE("none   ", -1, "none   "),

        /**
         * Only error messages are logged.
         */
        ERROR("error  ", 0, Color.RED.getAnsiCode() + "error" + Color.RESET.getAnsiCode() + "  "),

        /**
         * Error and warning messages are logged.
         */
        WARNING("warning", 1, Color.YELLOW.getAnsiCode() + "warning" + Color.RESET.getAnsiCode() + ""),

        /**
         * Error, warning, and status messages are logged.
         */
        STATUS("status ", 2, Color.BLUE.getAnsiCode() + "status" + Color.RESET.getAnsiCode() + " "),

        /**
         * Error, warning, status, and info messages are logged.
         */
        INFO("info   ", 3, Color.GREEN.getAnsiCode() + "info" + Color.RESET.getAnsiCode() + "   "),

        /**
         * All messages (error, warning, status, info, debug) are logged.
         */
        DEBUG("debug  ", 4, Color.CYAN.getAnsiCode() + "debug" + Color.RESET.getAnsiCode() + "  ");

        // fixed-width display string (padded so all levels align in log output)
        private @NonNull String str;

        // numeric rank; messages with rank <= this level's rank are logged
        private int level;

        // same as str but wrapped in ANSI color codes for color-capable targets
        private @NonNull String ansiString;

        /**
         * Creates a log level.
         *
         * @param str The string representation of this log level.
         * @param level The level. All log levels with values <= this value will be logged.
         * @param ansiString The string representation of this log level with ANSI escape code colors.
         */
        private Level(@NonNull String str, int level, @NonNull String ansiString) {
            this.str = str;
            this.level = level;
            this.ansiString = ansiString;
        }

        /**
         * Whether a message with the specified level will be logged, if the logger is set to this level.
         *
         * @param other The other level to check.
         * @return Whether the other level will be logged if this level is set.
         */
        public boolean shouldLog(@NonNull Level other) {
            return this.level >= other.level;
        }

        @Override
        public @NonNull String toString() {
            return notNull(str.trim());
        }

        /**
         * Creates a string representation of this level to be used in the log output. All levels will return strings
         * of equal length.
         *
         * @param ansiColor Whether to add ANSI coloring codes or not.
         * @return A string representation of this level.
         */
        private @NonNull String toLogString(boolean ansiColor) {
            return ansiColor ? ansiString : str;
        }
    }
/**
* The singleton instance.
*/
private static @NonNull Logger instance = new Logger();
/**
* The level at which this logger should start logging.
*/
private @NonNull Level level;
/**
* The target to log to.
*/
private @NonNull ArrayList<@NonNull Target> targets;
/**
* The charset used by the logger. All targets have the same charset.
*/
private @NonNull Charset charset;
/**
* File used as target for logging specified in the configuration.
*/
private @Nullable File logFile;
private @Nullable Boolean forceColor;
/**
* A single target of this logger.
*/
private static final class Target {
private @NonNull OutputStream out;
private boolean useColor;
/**
* Creates a new target.
*
* @param out The output stream of this target.
* @param useColor Whether to use ANSI color codes on this target.
*/
public Target(@NonNull OutputStream out, boolean useColor) {
this.out = out;
this.useColor = useColor;
}
}
/**
* Instantiates a new logger.
*/
private Logger() {
this.targets = new ArrayList<>(2);
addTarget(notNull(System.out));
this.charset = notNull(Charset.forName("UTF-8"));
this.level = Level.INFO;
}
/**
* Gets the singleton instance of Logger.
*
* @return the logger
*/
public static @NonNull Logger get() {
return instance;
}
/**
* The setup method sets the log level and targets. Overrides any existing targets.
*
* @param config The configuration for the logger; must not be <code>null</code>.
* @throws SetUpException
* Throws the SetUpException when the path to log to is not a
* valid directory.
*/
public void setup(@NonNull Configuration config) throws SetUpException {
synchronized (targets) {
targets.clear();
logFile = null;
level = config.getValue(DefaultSettings.LOG_LEVEL);
forceColor = config.getValue(DefaultSettings.LOG_FORCE_COLOR);
if (config.getValue(DefaultSettings.LOG_CONSOLE)) {
addTarget(notNull(System.out));
}
if (config.getValue(DefaultSettings.LOG_FILE)) {
logFile = new File(config.getValue(DefaultSettings.LOG_DIR),
Timestamp.INSTANCE.getFilename("KernelHaven", "log"));
try {
addTarget(new FileOutputStream(logFile));
} catch (FileNotFoundException e) {
throw new SetUpException(e);
}
}
}
}
/**
* Returns the list of targets for this logger.
*
* @return An unmodifiable list of the targets of this logger.
*/
public @NonNull List<@NonNull OutputStream> getTargets() {
synchronized (targets) {
List<@NonNull OutputStream> result = new ArrayList<>(targets.size());
for (Target target : targets) {
result.add(target.out);
}
return notNull(Collections.unmodifiableList(result));
}
}
/**
* Removes a target from the list of log targets.
*
* @param index The index of the target to remove (see {@link #getTargets()} for indices).
*
* @throws IndexOutOfBoundsException If the index is out of bounds.
*/
public void removeTarget(int index) throws IndexOutOfBoundsException {
synchronized (targets) {
targets.remove(index);
}
}
/**
* Removes all targets that this logger currently logs to.
*/
public void clearAllTargets() {
synchronized (targets) {
targets.clear();
}
}
/**
* Adds a target for this logger. This logger will always obtain a lock (via synchronized(target)) on this object
* before writing to it.
*
* @param target The target to add to this logger.
*/
public void addTarget(@NonNull OutputStream target) {
synchronized (targets) {
targets.add(new Target(target, useColor(target)));
}
}
/**
* Overwrite the current log level.
*
* @param level The new log level.
*/
public void setLevel(@NonNull Level level) {
this.level = level;
}
/**
* Creates a "header" prefix for log lines. The lines contain the specified
* log level, the name of the current thread and the time.
*
* @param level The log level that will be used. Must not be null.
* @param useColors Whether ANSI color codes should be used.
* @return A string in the format "[level] [time] [threadName] "
*/
private @NonNull String constructHeader(@NonNull Level level, boolean useColors) {
StringBuffer hdr = new StringBuffer();
String timestamp = new Timestamp().getTimestamp();
String levelStr = level.toLogString(useColors);
String threadName = Thread.currentThread().getName();
if (useColors) {
threadName = Color.WHITE.getAnsiCode() + threadName + Color.RESET.getAnsiCode();
}
hdr
.append('[')
.append(timestamp)
.append("] [")
.append(levelStr)
.append("] [")
.append(threadName).append("] ");
return notNull(hdr.toString());
}
/**
* Checks first whether to log the message and only iff the message shall be logged, it will concatenate
* the single parts and log the complete message into a single line.
* @param level The log level to be written. Must not be <tt>null</tt>.
* @param messageParts The message to be logged, for all elements {@link Object#toString()} is called to concatenate
* the message.
*/
private void log(@NonNull Level level, @Nullable Object /*@NonNull*/ ... messageParts) {
if (this.level.shouldLog(level) && null != messageParts) {
StringBuffer messageLine = new StringBuffer();
for (Object object : messageParts) {
if (object instanceof Object[]) {
Object[] innerArray = (Object[]) object;
for (Object innerObject : innerArray) {
messageLine.append(innerObject != null ? innerObject.toString() : "null");
}
} else {
messageLine.append(object != null ? object.toString() : "null");
}
}
log(level, messageLine.toString());
}
}
/**
* Writes a single log entry consisting of the specified lines with the
* specified log level to the target. Internally, a lock on {@link #target}
* is acquired to ensure that messages are not splitted in a multi-threaded
* environment.
*
* @param level
* The log level to be written. Must not be null.
* @param lines
* The lines that are written together as one log entry. Must not
* be null.
*/
private void log(@NonNull Level level, String /*@NonNull*/ ... lines) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
if (!this.level.shouldLog(level)) {
return;
}
String header = constructHeader(level, false);
String indent = "";
if (lines.length > 1) {
char[] whitespaces = new char[header.length()];
Arrays.fill(whitespaces, ' ');
indent = new String(whitespaces);
}
byte[] headerBytes = header.getBytes(charset);
StringBuffer str = new StringBuffer();
for (int i = 0; i < lines.length; i++) {
if (i != 0) {
str.append(indent);
}
str.append(lines[i]).append('\n');
}
byte[] bytes = str.toString().getBytes(charset);
synchronized (this.targets) {
for (Target target : this.targets) {
synchronized (target.out) {
try {
if (target.useColor) {
// no need to cache this header, since only one target will be System.out
target.out.write(constructHeader(level, true).getBytes(charset));
} else {
target.out.write(headerBytes);
}
target.out.write(bytes);
target.out.flush();
} catch (IOException e) {
e.printStackTrace();
}
}
}
}
}
/**
* Determines whether ANSI color codes should be used for the given target stream.
*
* @param target The stream to log to.
*
* @return Whether ANSI color codes should be used.
*/
private boolean useColor(OutputStream target) {
boolean result;
Boolean forceColor = this.forceColor;
if (forceColor != null) {
result = forceColor && target == System.out;
} else {
result = Util.isTTY(target);
}
return result;
}
/**
* Logs a log entry with the log level "info".
*
* @param lines
* The content of the log entry. Must not be null.
*/
public void logInfo(String /*@NonNull*/ ... lines) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.INFO, lines);
}
/**
* Logs a log entry with the log level "info".
*
* @param messageParts The content of the log entry.
*/
public void logInfo2(@Nullable Object /*@NonNull*/ ... messageParts) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.INFO, messageParts);
}
/**
* Logs a log entry with the log level "info".
*
* @param lines
* The content of the log entry. Must not be null.
*/
public void logStatusLines(String /*@NonNull*/ ... lines) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.STATUS, lines);
}
/**
* Logs a log entry with the log level "info".
*
* @param messageParts The content of the log entry.
*/
public void logStatus(@Nullable Object /*@NonNull*/ ... messageParts) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.STATUS, messageParts);
}
/**
* Logs a log entry with the log level "debug".
*
* @param lines
* The content of the log entry. Must not be null.
*/
public void logDebug(String /*@NonNull*/ ... lines) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.DEBUG, lines);
}
/**
* Logs a log entry with the log level "debug".
*
* @param messageParts The content of the log entry.
*/
public void logDebug2(@Nullable Object /*@NonNull*/ ... messageParts) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.DEBUG, messageParts);
}
/**
* Logs a log entry with the log level "warning".
*
* @param lines
* The content of the log entry.
*/
public void logWarning(String /*@NonNull*/ ... lines) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.WARNING, lines);
}
/**
* Logs a log entry with the log level "warning".
*
* @param messageParts The content of the log entry.
*/
public void logWarning2(@Nullable Object /*@NonNull*/ ... messageParts) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.WARNING, messageParts);
}
/**
* Logs a log entry with the log level "error".
*
* @param lines
* The content of the log entry. Must not be null.
*/
public void logError(String /*@NonNull*/ ... lines) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.ERROR, lines);
}
/**
* Logs a log entry with the log level "error".
*
* @param messageParts The content of the log entry.
*/
public void logError2(@Nullable Object /*@NonNull*/ ... messageParts) {
// TODO: commented out @NonNull annotation because checkstyle can't parse it
log(Level.ERROR, messageParts);
}
/**
* Converts a given exception to a string and adds the lines to the list.
* The string will contain the stack trace, much like
* {@link Throwable#printStackTrace()}. Additionally, the causing exceptions
* are converted into strings, too.
*
* @param exc
* The exception to convert into a string. Must not be null.
* @param lines
* The output target. The lines are appended to this list. Must
* not be null.
*/
private void exceptionToString(@NonNull Throwable exc, @NonNull List<String> lines) {
String[] headerLines = exc.toString().split("\n");
for (String headerLine : headerLines) {
lines.add(headerLine);
}
StackTraceElement[] stack = exc.getStackTrace();
for (int i = 0; i < stack.length; i++) {
lines.add(" at " + stack[i].toString());
}
Throwable cause = exc.getCause();
if (cause != null) {
lines.add("Caused by:");
exceptionToString(cause, lines);
}
}
/**
* Creates a log entry from the given comment and exception.
*
* @param level The log level to log at.
* @param comment
* A comment that is displayed above the exception. A ":" is
* appended to it by this method. Must not be null.
* @param exc
* The exception to log. A stack trace will be logged.
*/
private void logException(@NonNull Level level, @NonNull String comment, @Nullable Throwable exc) {
List<String> lines;
if (exc != null) {
lines = new ArrayList<>(exc.getStackTrace().length + 2);
lines.add(comment + ":");
exceptionToString(exc, lines);
} else {
lines = new ArrayList<>(1);
lines.add(comment + ": <exception is null>");
}
log(level, notNull(lines.toArray(new String[0])));
}
/**
* Creates a log entry from the given comment and exception. The log level
* is "error".
*
* @param comment
* A comment that is displayed above the exception. A ":" is
* appended to it by this method. Must not be null.
* @param exc
* The exception to log. A stack trace will be logged.
*/
public void logException(@NonNull String comment, @Nullable Throwable exc) {
logException(Level.ERROR, comment, exc);
}
/**
* Creates a log entry from the given comment and exception. The log level
* is "debug".
*
* @param comment
* A comment that is displayed above the exception. A ":" is
* appended to it by this method. Must not be null.
* @param exc
* The exception to log. A stack trace will be logged.
*/
public void logExceptionDebug(@NonNull String comment, @Nullable Throwable exc) {
logException(Level.DEBUG, comment, exc);
}
/**
* Creates a log entry from the given comment and exception. The log level
* is "warning".
*
* @param comment
* A comment that is displayed above the exception. A ":" is
* appended to it by this method. Must not be null.
* @param exc
* The exception to log. A stack trace will be logged.
*/
public void logExceptionWarning(@NonNull String comment, @Nullable Throwable exc) {
logException(Level.WARNING, comment, exc);
}
/**
* Creates a log entry from the given comment and exception. The log level
* is "info".
*
* @param comment
* A comment that is displayed above the exception. A ":" is
* appended to it by this method. Must not be null.
* @param exc
* The exception to log. A stack trace will be logged.
*/
public void logExceptionInfo(@NonNull String comment, @Nullable Throwable exc) {
logException(Level.INFO, comment, exc);
}
/**
* Prints a stack trace of the current thread to the log.
* @param level The log level to be written. Must not be <tt>null</tt>.
* @param text The title of the message to be printed (introduces the stack trace).
*/
public void logStackTrace(@NonNull Level level, @NonNull String text) {
if (this.level.shouldLog(level)) {
// Creates the Stack Trace (copied from: Thread.currentThread().dumpStack())
Exception stackTrace = new Exception(text);
StringWriter sWriter = new StringWriter();
PrintWriter pWriter = new PrintWriter(sWriter);
stackTrace.printStackTrace(pWriter);
// Delete exception from part from Stack Trace:
String result = notNull(sWriter.toString());
int startPos = 21; // "java.lang.Exception: ".length()
if (result.length() > startPos) {
result = notNull(result.substring(startPos));
}
log(level, result.split("\n"));
}
}
/**
* Gets the target logging file specified in the configuration..
*
* @return the file used as logging target. May be null if not logging to a file.
*/
public @Nullable File getLogFile() {
return logFile;
}
}
|
package nonregressiontest;
/**
 * Intentionally empty class; its only purpose is to be modified so that diffs show up in the CVS message log.
 */
public class TestCVSMessageLog {
// Adding a dummy line in the class to test the diff
}
|
package org.jtrim.concurrent.async;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.jtrim.cache.JavaRefObjectCache;
import org.jtrim.cache.ObjectCache;
import org.jtrim.cache.ReferenceType;
import org.jtrim.cache.VolatileReference;
import org.jtrim.cancel.Cancellation;
import org.jtrim.cancel.CancellationSource;
import org.jtrim.cancel.CancellationToken;
import org.jtrim.collections.RefCollection;
import org.jtrim.collections.RefLinkedList;
import org.jtrim.collections.RefList;
import org.jtrim.concurrent.*;
import org.jtrim.event.ListenerRef;
import org.jtrim.utils.ExceptionHelper;
/**
 * An {@code AsyncDataLink} which wraps another {@code AsyncDataLink} and caches the data it provides
 * in a {@link VolatileReference} (created by the given {@link ObjectCache} with the given
 * {@link ReferenceType}). Concurrent listeners share a single underlying data retrieval ("session");
 * once no listener remains attached, the underlying retrieval is canceled after the configured timeout.
 *
 * @see AsyncLinks#refCacheResult(AsyncDataLink, ReferenceType, ObjectCache, long, TimeUnit)
 *
 * @author Kelemen Attila
 */
final class RefCachedDataLink<DataType>
implements
        AsyncDataLink<RefCachedData<DataType>> {
    // Note that if no one but the internal objects reference this
    // data link no one can register with it and if there is no
    // listener it would be safe to cancel immediately the data receiving
    // however this would make this code more complex so this feature is
    // not implemented yet.

    // Shared daemon timer used to cancel abandoned sessions after the timeout.
    private static final ScheduledExecutorService CANCEL_TIMER
            = ExecutorsEx.newSchedulerThreadedExecutor(1, true,
                    "RefCachedDataLink cancel timer");

    // How (weak/soft/hard) and where the received data is cached.
    private final ReferenceType refType;
    private final ObjectCache refCreator;
    private final AsyncDataLink<? extends DataType> wrappedDataLink;
    // 0 means: cancel the underlying retrieval immediately once no listener remains.
    private final long dataCancelTimeoutNanos;

    // Everything is synchronized by being accessed on inOrderExecutor
    // So except for the executeSynchronized methods and where otherwise noted,
    // private methods (including even public methods of private inner classes)
    // are only allowed to be called in the context of inOrderExecutor.
    private final ContextAwareTaskExecutor inOrderExecutor;
    private final DispatcherListener dispatcher;
    private final RefList<Registration> currentRegistrations;
    private SessionInfo<DataType> currentSession;

    /**
     * Creates the caching data link.
     *
     * @param wrappedDataLink the link whose data is cached; must not be {@code null}
     * @param refType the reference type used to cache the data; must not be {@code null}
     * @param refCreator the cache creating the references; {@code JavaRefObjectCache.INSTANCE} is used when
     *   {@code null}
     * @param dataCancelTimeout time to wait before canceling an abandoned retrieval; must be non-negative
     * @param timeoutUnit unit of {@code dataCancelTimeout}; must not be {@code null}
     */
    public RefCachedDataLink(
            AsyncDataLink<? extends DataType> wrappedDataLink,
            ReferenceType refType, ObjectCache refCreator,
            long dataCancelTimeout, TimeUnit timeoutUnit) {
        ExceptionHelper.checkNotNullArgument(wrappedDataLink, "wrappedDataLink");
        ExceptionHelper.checkNotNullArgument(refType, "refType");
        ExceptionHelper.checkNotNullArgument(timeoutUnit, "timeoutUnit");
        ExceptionHelper.checkArgumentInRange(dataCancelTimeout, 0, Long.MAX_VALUE, "dataCancelTimeout");

        this.refType = refType;
        this.refCreator = refCreator != null
                ? refCreator
                : JavaRefObjectCache.INSTANCE;
        this.dataCancelTimeoutNanos = timeoutUnit.toNanos(dataCancelTimeout);
        this.wrappedDataLink = wrappedDataLink;
        this.inOrderExecutor = TaskExecutors.inOrderSyncExecutor();
        this.currentRegistrations = new RefLinkedList<>();
        this.currentSession = new SessionInfo<>();
        this.dispatcher = new DispatcherListener();
    }

    // Runs the given task on inOrderExecutor; may be called from any thread.
    private void executeSynchronized(CancelableTask task) {
        inOrderExecutor.execute(Cancellation.UNCANCELABLE_TOKEN, task, null);
    }

    // Convenience overload wrapping a plain Runnable; may be called from any thread.
    private void executeSynchronized(final Runnable task) {
        executeSynchronized(new CancelableTask() {
            @Override
            public void execute(CancellationToken cancelToken) {
                task.run();
            }
        });
    }

    /**
     * Registers a listener for the (possibly cached) data. Depending on the state of the current
     * session, either a new retrieval is started, the listener is attached to the running one, or
     * the cached final result is replayed. Called by external code, hence the dispatch through
     * {@code executeSynchronized} and the late-initialized controller.
     */
    @Override
    public AsyncDataController getData(
            CancellationToken cancelToken,
            AsyncDataListener<? super RefCachedData<DataType>> dataListener) {
        final Registration registration = new Registration(cancelToken, dataListener);
        final InitLaterDataController controller = new InitLaterDataController();

        executeSynchronized(new Runnable() {
            @Override
            public void run() {
                AsyncDataController wrappedController;
                switch (currentSession.state) {
                    case NOT_STARTED:
                        wrappedController = startNewSession(registration);
                        break;
                    case RUNNING:
                        wrappedController = attachToSession(registration);
                        break;
                    case FINALIZING:
                        throw new IllegalStateException("This data link is"
                                + " broken due to an error in the"
                                + " onDoneReceive.");
                    case DONE:
                        wrappedController = attachToDoneSession(registration);
                        break;
                    default:
                        throw new AssertionError("Unexpected enum value.");
                }
                controller.initController(wrappedController);
            }
        });
        return new DelegatedAsyncDataController(controller);
    }

    // Replaces currentSession with a fresh one; cancels a pending cancel timer (if any)
    // and cancels the previous session's data retrieval through its cancellation source.
    private void clearCurrentSession() {
        assert inOrderExecutor.isExecutingInThis();

        Future<?> prevCancelTimer = currentSession.cancelTimerFuture;
        CancellationSource prevCancelSource = currentSession.cancelSource;
        currentSession = new SessionInfo<>();

        if (prevCancelTimer != null) {
            // Just to be on the safe side we don't interrupt
            // the thread, since we have no idea what thread would
            // that interrupt. Besides, cancellation tasks are expected
            // to be fast and should not consider thread interruption.
            prevCancelTimer.cancel(false);
        }
        if (prevCancelSource != null) {
            prevCancelSource.getController().cancel();
        }
    }

    // Clears the old session, attaches the registration, then starts retrieving data.
    private AsyncDataController startNewSession(Registration registration) {
        assert inOrderExecutor.isExecutingInThis();

        clearCurrentSession();
        registration.attach();
        return startNewSession();
    }

    // session must be cleared before this method call.
    private AsyncDataController startNewSession() {
        assert currentSession.state == ProviderState.NOT_STARTED;
        assert inOrderExecutor.isExecutingInThis();

        currentSession.controller = wrappedDataLink.getData(
                currentSession.cancelSource.getToken(), dispatcher);
        currentSession.state = ProviderState.RUNNING;
        return currentSession.controller;
    }

    // Returns the cached data of the current session, or null if there is none
    // or the cache reference has already been collected.
    private RefCachedData<DataType> getCurrentCachedData() {
        assert inOrderExecutor.isExecutingInThis();

        VolatileReference<DataType> cachedDataRef = currentSession.cachedData;
        if (cachedDataRef == null) {
            return null;
        }

        DataType cachedData = cachedDataRef.get();
        if (cachedData == null) {
            return null;
        }

        return new RefCachedData<>(cachedData, cachedDataRef);
    }

    // Attaches the registration to the running session, replaying cached data if available.
    private AsyncDataController attachToSession(Registration registration) {
        assert currentSession.state == ProviderState.RUNNING;
        assert inOrderExecutor.isExecutingInThis();

        registration.attach();

        RefCachedData<DataType> cachedData = getCurrentCachedData();
        if (cachedData != null) {
            registration.onDataArrive(cachedData);
            return currentSession.controller;
        }
        else {
            // We must restart the data retrieval if no more data will be
            // provided by the underlying data link and so must also replace
            // the controller.
            return registration.createReplacableController(currentSession.controller);
        }
    }

    // For a completed session: replay cached data + final report, or restart the
    // retrieval if the cached data has been collected in the meantime.
    private AsyncDataController attachToDoneSession(Registration registration) {
        assert currentSession.state == ProviderState.DONE;
        assert inOrderExecutor.isExecutingInThis();

        RefCachedData<DataType> cachedData = getCurrentCachedData();
        if (cachedData != null) {
            try {
                registration.onDataArrive(cachedData);
            } finally {
                registration.onDoneReceive(currentSession.finalReport);
            }
            return DoNothingDataController.INSTANCE;
        }
        else {
            return startNewSession(registration);
        }
    }

    // Caches newly arrived data and forwards it to every registration; exceptions
    // thrown by listeners are collected (via addSuppressed) and rethrown at the end.
    private void dispatchData(DataType data) {
        assert inOrderExecutor.isExecutingInThis();

        currentSession.receivedData = true;

        RefCachedData<DataType> dataRef = new RefCachedData<>(data, refCreator, refType);

        // The previous data can be removed from the cache since, we have a new
        // more accurate one.
        VolatileReference<DataType> prevDataRef = currentSession.cachedData;
        if (prevDataRef != null) {
            prevDataRef.clear();
        }
        currentSession.cachedData = dataRef.getDataRef();

        Throwable error = null;
        for (Registration registration: currentRegistrations) {
            try {
                registration.onDataArrive(dataRef);
            } catch (Throwable ex) {
                if (error != null) error.addSuppressed(ex);
                else error = ex;
            }
        }
        if (error != null) {
            ExceptionHelper.rethrow(error);
        }
    }

    // Completes the session: notifies registrations that received all data, and restarts
    // the retrieval for registrations that missed data sent before they attached.
    // NOTE(review): onDoneReceive removes the registration from "currentRegistrations"
    // while it is being iterated; RefLinkedList apparently tolerates this -- confirm.
    private void dispatchDone(AsyncReport report) {
        assert inOrderExecutor.isExecutingInThis();

        Throwable error = null;

        // FINALIZING guards against getData() attaching while listeners are being completed.
        currentSession.state = ProviderState.FINALIZING;
        currentSession.controller = null;
        boolean sessionReceivedData = currentSession.receivedData;

        for (Registration registration: currentRegistrations) {
            try {
                // It is possible that a session was attached after the final data
                // has been sent and before onDoneReceive was called.
                // If data has been sent to the backing listener but not
                // the attached listener, we can be sure that we need to re-request
                // the data for that particular listener. These listeners will
                // remain in "currentRegistrations".
                if (!sessionReceivedData || registration.receivedData) {
                    registration.onDoneReceive(report);
                    // Notice that the onDoneReceive method call will remove
                    // "registration" from "currentRegistrations".
                }
            } catch (Throwable ex) {
                if (error != null) error.addSuppressed(ex);
                else error = ex;
            }
        }

        try {
            if (!currentRegistrations.isEmpty()) {
                clearCurrentSession();
                AsyncDataController newController = startNewSession();
                for (Registration registration: currentRegistrations) {
                    registration.replaceController(newController);
                }
            }
            else {
                currentSession.finalReport = report;
                currentSession.state = ProviderState.DONE;
            }
        } catch (Throwable ex) {
            if (error != null) error.addSuppressed(ex);
            else error = ex;
        }

        if (error != null) {
            ExceptionHelper.rethrow(error);
        }
    }

    // Aborts a pending cancel timer, if a registration is (again) attached.
    private void checkStopCancellation() {
        assert inOrderExecutor.isExecutingInThis();

        if (!currentRegistrations.isEmpty()) {
            Future<?> currentCancelFuture = currentSession.cancelTimerFuture;
            currentSession.cancelTimerFuture = null;
            if (currentCancelFuture != null) {
                currentCancelFuture.cancel(false);
            }
        }
    }

    // If the session is still running but no listener remains, cancel the underlying
    // retrieval -- immediately for a zero timeout, otherwise via the cancel timer.
    private void checkSessionCancellation() {
        assert inOrderExecutor.isExecutingInThis();

        if (!currentSession.state.isCompleted() && currentRegistrations.isEmpty()) {
            if (dataCancelTimeoutNanos == 0) {
                clearCurrentSession();
                return;
            }

            final SessionInfo<?> cancelSession = currentSession;
            if (cancelSession.cancelTimerFuture == null) {
                cancelSession.cancelTimerFuture = CANCEL_TIMER.schedule(new Runnable() {
                    @Override
                    public void run() {
                        executeSynchronized(new Runnable() {
                            @Override
                            public void run() {
                                // A null cancelTimerFuture means the cancellation has been
                                // aborted (checkStopCancellation) in the meantime.
                                if (cancelSession.cancelTimerFuture != null) {
                                    clearCurrentSession();
                                }
                            }
                        });
                    }
                }, dataCancelTimeoutNanos, TimeUnit.NANOSECONDS);
            }
        }
    }

    @Override
    public String toString() {
        StringBuilder result = new StringBuilder(256);
        result.append("Cache [");
        result.append(refType);
        result.append("] result of ");
        AsyncFormatHelper.appendIndented(wrappedDataLink, result);

        return result.toString();
    }

    // State of a single data retrieval from the wrapped link. Fields are only
    // accessed in the context of inOrderExecutor (see class comment).
    private static class SessionInfo<DataType> {
        public final CancellationSource cancelSource = Cancellation.createCancellationSource();
        public ProviderState state = ProviderState.NOT_STARTED;
        public AsyncDataController controller = null;
        public VolatileReference<DataType> cachedData = null;
        // Whether at least one data object arrived during this session.
        public boolean receivedData = false;
        // Pending CANCEL_TIMER task; null also signals "cancellation aborted".
        private Future<?> cancelTimerFuture = null;
        // Only set once the session reached the DONE state.
        private AsyncReport finalReport;
    }

    // A single listener registered through getData().
    private class Registration {
        private final CancellationToken cancelToken;
        private final AsyncDataListener<RefCachedData<DataType>> safeListener;
        private RefCollection.ElementRef<?> listenerRef;
        private ListenerRef cancelRef;
        // Whether this particular listener received at least one data object.
        private boolean receivedData;
        private ReplacableController controller;

        public Registration(
                CancellationToken cancelToken,
                AsyncDataListener<? super RefCachedData<DataType>> dataListener) {
            ExceptionHelper.checkNotNullArgument(cancelToken, "cancelToken");
            ExceptionHelper.checkNotNullArgument(dataListener, "dataListener");

            this.cancelToken = cancelToken;
            this.safeListener = AsyncHelper.makeSafeListener(dataListener);
            this.listenerRef = null;
            this.cancelRef = null;
            this.receivedData = false;
            this.controller = null;
        }

        // Wraps the given controller so it can later be swapped for the one of a restarted session.
        public AsyncDataController createReplacableController(AsyncDataController initialController) {
            assert inOrderExecutor.isExecutingInThis();

            controller = new ReplacableController(initialController);
            return controller;
        }

        public void replaceController(AsyncDataController newController) {
            assert inOrderExecutor.isExecutingInThis();

            if (controller == null) {
                throw new IllegalStateException("Internal error: "
                        + "Unexpected new AsyncDataController");
            }

            controller.replaceController(newController);
            // We never need to restart the data transfer more than once and
            // we only replace controller when we restart.
            controller.willNotReplaceController();
        }

        public void onDataArrive(RefCachedData<DataType> dataRef) {
            assert inOrderExecutor.isExecutingInThis();

            receivedData = true;
            try {
                safeListener.onDataArrive(dataRef);
            } finally {
                // If we have received data, we will not replace the controller
                // because that means that from now on, we will receive every
                // data, so there is no reason to restart the data retrieval
                // an so no new controller is needed.
                if (controller != null) {
                    controller.willNotReplaceController();
                }
            }
        }

        // Completes this listener and detaches it; may trigger cancellation of the session.
        public void onDoneReceive(AsyncReport report) {
            assert inOrderExecutor.isExecutingInThis();

            try {
                safeListener.onDoneReceive(report);
            } finally {
                cleanup();
                checkSessionCancellation();
            }
        }

        public boolean hasReceivedData() {
            assert inOrderExecutor.isExecutingInThis();
            return receivedData;
        }

        private void removeFromList() {
            assert inOrderExecutor.isExecutingInThis();

            RefCollection.ElementRef<?> currentRef = listenerRef;
            listenerRef = null;
            if (currentRef != null) {
                currentRef.remove();
            }
        }

        private void removeFromCancelToken() {
            assert inOrderExecutor.isExecutingInThis();

            ListenerRef currentRef = cancelRef;
            cancelRef = null;
            if (currentRef != null) {
                currentRef.unregister();
            }
        }

        private void cleanup() {
            assert inOrderExecutor.isExecutingInThis();

            removeFromList();
            removeFromCancelToken();
        }

        // Adds this registration to currentRegistrations and hooks up cancellation forwarding.
        // NOTE(review): the ListenerRef returned by addCancellationListener is not stored in
        // "cancelRef", so removeFromCancelToken() is effectively a no-op -- confirm whether
        // the listener was meant to be unregistered on cleanup.
        public void attach() {
            assert inOrderExecutor.isExecutingInThis();

            cleanup();
            listenerRef = currentRegistrations.addGetReference(this);
            checkStopCancellation();

            cancelToken.addCancellationListener(new Runnable() {
                @Override
                public void run() {
                    executeSynchronized(new Runnable() {
                        @Override
                        public void run() {
                            onDoneReceive(AsyncReport.CANCELED);
                        }
                    });
                }
            });
        }
    }

    /**
     * Called by external code, so inherited methods are not executed in the
     * context of inOrderExecutor.
     */
    private class DispatcherListener implements AsyncDataListener<DataType> {
        // UpdateTaskExecutor, so a newer data object may overwrite a queued older one.
        private final UpdateTaskExecutor dataExecutor;

        public DispatcherListener() {
            this.dataExecutor = new GenericUpdateTaskExecutor(inOrderExecutor);
        }

        @Override
        public void onDataArrive(final DataType data) {
            dataExecutor.execute(new Runnable() {
                @Override
                public void run() {
                    dispatchData(data);
                }
            });
        }

        @Override
        public void onDoneReceive(final AsyncReport report) {
            executeSynchronized(new Runnable() {
                @Override
                public void run() {
                    dispatchDone(report);
                }
            });
        }
    }

    /**
     * Called by external code, so inherited methods are not executed in the
     * context of inOrderExecutor.
     */
    private class ReplacableController implements AsyncDataController {
        // Records control arguments so they can be re-sent to a replacement controller;
        // null once willNotReplaceController() was called (no more recording needed).
        private List<Object> controllerArgs;
        private volatile AsyncDataController currentController;

        public ReplacableController(AsyncDataController initialController) {
            ExceptionHelper.checkNotNullArgument(initialController, "initialController");

            this.controllerArgs = new LinkedList<>();
            this.currentController = initialController;
        }

        @Override
        public void controlData(final Object controlArg) {
            executeSynchronized(new Runnable() {
                @Override
                public void run() {
                    List<Object> collectedControllerArgs = controllerArgs;
                    if (collectedControllerArgs != null) {
                        collectedControllerArgs.add(controlArg);
                    }
                    currentController.controlData(controlArg);
                }
            });
        }

        @Override
        public AsyncDataState getDataState() {
            return currentController.getDataState();
        }

        // Swaps in the controller of a restarted session and replays recorded control args.
        public void replaceController(AsyncDataController controller) {
            assert inOrderExecutor.isExecutingInThis();

            ExceptionHelper.checkNotNullArgument(controller, "controller");

            currentController = controller;
            for (Object controlArg: controllerArgs) {
                controller.controlData(controlArg);
            }
        }

        public void willNotReplaceController() {
            assert inOrderExecutor.isExecutingInThis();
            controllerArgs = null;
        }
    }

    // Lifecycle of a session; "completed" covers both FINALIZING and DONE.
    private enum ProviderState {
        NOT_STARTED(false),
        RUNNING(false),
        FINALIZING(true),
        DONE(true);

        private final boolean completed;

        private ProviderState(boolean completed) {
            this.completed = completed;
        }

        public boolean isCompleted() {
            return completed;
        }
    }
}
|
package com.fsck.k9.mail.internet;
import com.fsck.k9.mail.K9LibRobolectricTestRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
@RunWith(K9LibRobolectricTestRunner.class)
public class DecoderUtilTest {
    @Test
    public void decodeEncodedWords_with_unencoded_data_returns_original_text() {
        // Text containing no RFC 2047 encoded word is returned verbatim.
        assertInputDecodesToExpected("abc", "abc");
    }
    @Test
    public void decodeEncodedWords_withAsciiCharset_encoded_data_returns_text() {
        // A well-formed Q-encoded us-ascii encoded word is decoded to its payload.
        assertInputDecodesToExpected("=?us-ascii?q?abc?=", "abc");
    }
    @Test
    public void decodeEncodedWords_withStartOnly_encoding_format_returnAsText() {
        // A lone encoded-word start marker is not decodable and is kept as-is.
        assertInputDecodesToExpected("=?", "=?");
    }
@Test
public void decodeEncodedWords_withStartAndSeparatorOnly_returnAsText() {
assertInputDecodesToExpected("=??", "=??");
}
@Test
public void decodeEncodedWords_withStartAnd2SeparatorOnly_returnAsText() {
assertInputDecodesToExpected("=???", "=???");
}
@Test
public void decodeEncodedWords_withStartAnd3SeparatorOnly_returnAsText() {
assertInputDecodesToExpected("=????", "=????");
}
@Test
public void decodeEncodedWords_withSeparatorsOnly_returnAsText() {
assertInputDecodesToExpected("=????=", "=????=");
}
@Test
public void decodeEncodedWords_withMissingCharset_returnAsText() {
assertInputDecodesToExpected("=??q??=", "=??q??=");
}
@Test
public void decodeEncodedWords_withTextAndMissingCharset_returnAsText() {
assertInputDecodesToExpected("=??q?a?=", "a");
}
@Test
public void decodeEncodedWords_withNoTextCharsetOrEncoding_returnAsText() {
assertInputDecodesToExpected("=??=", "=??=");
}
@Test
public void decodeEncodedWords_with_MissingEncodingAndData_returnAsText() {
assertInputDecodesToExpected("=?x?=", "=?x?=");
}
@Test
public void decodeEncodedWords_withMissingEncoding_returnAsText() {
assertInputDecodesToExpected("=?x??=", "=?x??=");
}
@Test
public void decodeEncodedWords_with_incompleteEncodingFormat_returnAsText() {
assertInputDecodesToExpected("=?x?q?=", "=?x?q?=");
}
@Test
public void decodeEncodedWords_with_unrecognisedEncoding_withEmptyEncodedData_returnAsText() {
assertInputDecodesToExpected("=?x?q??=", "=?x?q??=");
}
@Test
public void decodeEncodedWords_withUnrecognisedEncoding_withEncodedData_return_encoded_data() {
assertInputDecodesToExpected("=?x?q?X?=", "X");
}
@Test
public void decodeEncodedWords_withInvalidBase64String_returnsEmptyString() {
assertInputDecodesToExpected("=?us-ascii?b?abc?=", "");
}
@Test
public void decodeEncodedWords_withPartiallyEncoded_returnsBothSections() {
assertInputDecodesToExpected("=?us-ascii?q?abc?= =?", "abc =?");
}
@Test
public void decodeEncodedWords_withPartiallyEncodedAfter_returnsBothSections() {
assertInputDecodesToExpected("def=?us-ascii?q?abc?=", "defabc");
}
@Test
public void decodeEncodedWords_withUnrecognisedCharset_returnsEncodedData() {
assertInputDecodesToExpected("=?x?= =?", "=?x?= =?");
}
@Test
public void decodeEncodedWords_withMultipleEncodedSections_decodesBoth() {
assertInputDecodesToExpected("=?us-ascii?q?abc?= =?us-ascii?q?def?=", "abcdef");
}
@Test
public void decodeEncodedWords_withMultipleEncodedSections_decodesSequentialSectionTogether() {
// Splitting mid-character is RFC2047 non-compliant but seen in practice.
// "=?utf-8?B?b2hhaSDw?=" individually decodes to "ohai "
// "=?utf-8?B?n5Kp==?=" individually decodes to ""
assertInputDecodesToExpected("=?utf-8?B?b2hhaSDw?= =?utf-8?B?n5Kp?=", "ohai ");
}
@Test
public void decodeEncodedWords_withRFC2047examples_decodesCorrectly() {
assertInputDecodesToExpected("(=?ISO-8859-1?Q?a?=)", "(a)");
assertInputDecodesToExpected("(=?ISO-8859-1?Q?a?= b)", "(a b)");
assertInputDecodesToExpected("(=?ISO-8859-1?Q?a?= =?ISO-8859-1?Q?b?=)", "(ab)");
assertInputDecodesToExpected("(=?ISO-8859-1?Q?a?= =?ISO-8859-1?Q?b?=)", "(ab)");
assertInputDecodesToExpected("(=?ISO-8859-1?Q?a?= \n =?ISO-8859-1?Q?b?=)", "(ab)");
assertInputDecodesToExpected("(=?ISO-8859-1?Q?a_b?=)", "(a b)");
assertInputDecodesToExpected("(=?ISO-8859-1?Q?a?= =?ISO-8859-2?Q?_b?=)", "(a b)");
}
private void assertInputDecodesToExpected(String input, String expected) {
String decodedText = DecoderUtil.decodeEncodedWords(input, null);
assertEquals(expected, decodedText);
}
}
|
package com.stevesun.common.classes;
import com.stevesun.common.utils.CommonUtils;
import java.util.List;
/**
 * Singly-linked-list node used by the leetcode solutions.
 * <p>
 * Normally both {@code val} and {@code next} would be private with getters and
 * setters, but for the convenience of leetcode solutions they are public.
 */
public class ListNode {
    public int val;
    public ListNode next;

    public ListNode(int i) {
        this.val = i;
    }

    /** @return this node's value. */
    public int val() {
        return val;
    }

    /**
     * Builds the fixed sample list 1 -> 2 -> ... -> 10.
     *
     * @return the head of the sample list
     */
    public static ListNode createSinglyLinkedList() {
        ListNode head = new ListNode(1);
        ListNode tail = head;
        for (int v = 2; v <= 10; v++) {
            tail.next = new ListNode(v);
            tail = tail.next;
        }
        return head;
    }

    /**
     * Builds a singly linked list containing {@code listValues} in order.
     * <p>
     * Fixed: the previous implementation appended every node directly after the
     * head (the tail pointer was never advanced) and re-appended the first
     * value, so the result was just {@code head -> lastValue}. A tail pointer
     * is now advanced as nodes are attached.
     *
     * @param listValues the values for the list, in order; must be non-null and non-empty
     * @return the head of the newly built list
     * @throws IllegalArgumentException if {@code listValues} is null or empty
     */
    public static ListNode createSinglyLinkedList(List<Integer> listValues) {
        if (listValues == null || listValues.isEmpty())
            throw new IllegalArgumentException(
                    "Please pass in a valid listValues to create a singly linked list.");
        ListNode head = new ListNode(listValues.get(0));
        ListNode tail = head;
        // Start at index 1: the first value is already in the head node.
        for (int i = 1; i < listValues.size(); i++) {
            tail.next = new ListNode(listValues.get(i));
            tail = tail.next;
        }
        printList(head);
        return head;
    }

    /** Prints the list on a single tab-separated line, preceded by a newline. */
    public static void printList(ListNode head) {
        ListNode temp = head;
        System.out.println();
        while (temp != null) {
            System.out.print(temp.val() + "\t");
            temp = temp.next;
        }
    }

    public static void main(String...strings){
        List<Integer> values = CommonUtils.randomIntArrayGenerator(10, 20);
        createSinglyLinkedList(values);
        ListNode head = createSinglyLinkedList();
        printList(head);
        System.out.println("The end.");
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof ListNode)) return false;
        ListNode listNode = (ListNode) o;
        if (val != listNode.val) return false;
        // Lists are equal iff values match node-by-node (recursive on next).
        return next != null ? next.equals(listNode.next) : listNode.next == null;
    }

    @Override
    public int hashCode() {
        int result = val;
        result = 31 * result + (next != null ? next.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return "ListNode{" +
                "val=" + val +
                ", next=" + next +
                '}';
    }
}
|
package com.eggheadgames.aboutbox.activity;
import android.app.Activity;
import android.content.ActivityNotFoundException;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import android.widget.Toast;
import com.danielstone.materialaboutlibrary.MaterialAboutActivity;
import com.danielstone.materialaboutlibrary.items.MaterialAboutActionItem;
import com.danielstone.materialaboutlibrary.items.MaterialAboutItemOnClickListener;
import com.danielstone.materialaboutlibrary.items.MaterialAboutTitleItem;
import com.danielstone.materialaboutlibrary.model.MaterialAboutCard;
import com.danielstone.materialaboutlibrary.model.MaterialAboutList;
import com.eggheadgames.aboutbox.AboutConfig;
import com.eggheadgames.aboutbox.IAnalytic;
import com.eggheadgames.aboutbox.R;
import com.eggheadgames.aboutbox.share.EmailUtil;
import com.eggheadgames.aboutbox.share.ShareUtil;
public class AboutActivity extends MaterialAboutActivity {
public static void launch(Activity activity) {
Intent intent = new Intent(activity, AboutActivity.class);
activity.startActivity(intent);
}
@Override
protected MaterialAboutList getMaterialAboutList(Context context) {
final AboutConfig config = AboutConfig.getInstance();
return new MaterialAboutList.Builder()
.addCard(buildGeneralInfoCard(config))
.addCard(buildSupportCard(config))
.addCard(buildShareCard(config))
.addCard(buildAboutCard(config))
.addCard(buildSocialNetworksCard(config))
.addCard(buildPrivacyCard(config))
.build();
}
@NonNull
private MaterialAboutCard buildGeneralInfoCard(AboutConfig config) {
MaterialAboutCard.Builder generalInfoCardBuilder = new MaterialAboutCard.Builder();
generalInfoCardBuilder.addItem(new MaterialAboutTitleItem.Builder()
.text(config.appName)
.icon(config.appIcon)
.build());
generalInfoCardBuilder.addItem(new MaterialAboutActionItem.Builder()
.text(R.string.egab_version)
.subText(config.version)
.build());
return generalInfoCardBuilder.build();
}
@NonNull
private MaterialAboutCard buildSupportCard(final AboutConfig config) {
MaterialAboutCard.Builder card = new MaterialAboutCard.Builder();
if (!TextUtils.isEmpty(config.guideHtmlPath)) {
card.addItem(itemHelper(R.string.egab_guide, R.drawable.ic_help_green,
new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
openHTMLPage(config.guideHtmlPath);
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_guide));
}
})
);
}
card.addItem(itemHelper(R.string.egab_contact_support, R.drawable.ic_email_black,
new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
EmailUtil.contactUs(AboutActivity.this);
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_contact_log_event));
}
}));
return card.build();
}
@NonNull
private MaterialAboutCard buildShareCard(final AboutConfig config) {
MaterialAboutCard.Builder card = new MaterialAboutCard.Builder();
card.addItem(itemHelper(R.string.egab_leave_review, R.drawable.ic_review,
new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
openApp(config.buildType, config.packageName);
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_review_log_event));
}
}));
card.addItem(itemHelper(R.string.egab_share, R.drawable.ic_share_black,
new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
ShareUtil.share(AboutActivity.this);
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_share_log_event));
}
}));
return card.build();
}
@NonNull
private MaterialAboutCard buildAboutCard(final AboutConfig config) {
MaterialAboutCard.Builder card = new MaterialAboutCard.Builder();
card.addItem(itemHelper(R.string.egab_try_other_apps, R.drawable.ic_try_other_apps,
new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
openPublisher(config.buildType, config.appPublisher, config.packageName);
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_try_other_app_log_event));
}
}));
card.addItem(new MaterialAboutActionItem.Builder()
.text(config.aboutLabelTitle)
.icon(R.drawable.ic_about_black)
.setOnClickListener(new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
if (config.dialog == null) {
openHTMLPage(config.companyHtmlPath);
} else {
config.dialog.open(AboutActivity.this, config.companyHtmlPath, config.aboutLabelTitle);
}
logUIEventName(config.analytics, config.logUiEventName, config.aboutLabelTitle);
}
})
.build());
return card.build();
}
@NonNull
private MaterialAboutCard buildSocialNetworksCard(final AboutConfig config) {
MaterialAboutCard.Builder card = new MaterialAboutCard.Builder();
card.addItem(new MaterialAboutActionItem.Builder()
.text(R.string.egab_facebook_label)
.subText(config.facebookUserName)
.icon(R.drawable.ic_facebook_24)
.setOnClickListener(new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
getOpenFacebookIntent(AboutActivity.this, config.facebookUserName);
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_facebook_log_event));
}
})
.build());
card.addItem(new MaterialAboutActionItem.Builder()
.text(R.string.egab_twitter_label)
.subText(config.twitterUserName)
.icon(R.drawable.ic_twitter_24dp)
.setOnClickListener(new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
startTwitter(AboutActivity.this, config.twitterUserName);
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_twitter_log_event));
}
})
.build());
card.addItem(new MaterialAboutActionItem.Builder()
.text(R.string.egab_web_label)
.subText(config.webHomePage.replace("https:
.icon(R.drawable.ic_web_black_24dp)
.setOnClickListener(new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
openHTMLPage(config.webHomePage);
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_website_log_event));
}
})
.build());
return card.build();
}
@NonNull
private MaterialAboutCard buildPrivacyCard(final AboutConfig config) {
MaterialAboutCard.Builder card = new MaterialAboutCard.Builder();
card.addItem(itemHelper(R.string.egab_privacy_policy, R.drawable.ic_privacy,
new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
if (config.dialog == null) {
openHTMLPage(config.privacyHtmlPath);
} else {
config.dialog.open(AboutActivity.this, config.privacyHtmlPath, getString(R.string.egab_privacy_policy));
}
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_privacy_log_event));
}
})
);
card.addItem(itemHelper(R.string.egab_acknowledgements, R.drawable.ic_acknowledgements,
new MaterialAboutItemOnClickListener() {
@Override
public void onClick(boolean b) {
if (config.dialog == null) {
openHTMLPage(config.acknowledgmentHtmlPath);
} else {
config.dialog.open(AboutActivity.this, config.acknowledgmentHtmlPath, getString(R.string.egab_acknowledgements));
}
logUIEventName(config.analytics, config.logUiEventName, getString(R.string.egab_acknowledgements_log_event));
}
})
);
return card.build();
}
private MaterialAboutActionItem itemHelper(int name, int icon, MaterialAboutItemOnClickListener listener) {
return new MaterialAboutActionItem.Builder()
.text(name)
.icon(icon)
.setOnClickListener(listener)
.build();
}
@Override
protected CharSequence getActivityTitle() {
return getString(R.string.egab_about_screen_title);
}
public static void getOpenFacebookIntent(Activity context, String name) {
try {
context.getPackageManager().getPackageInfo("com.facebook.katana", 0);
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("fb://profile/" + name));
context.startActivity(intent);
} catch (Exception e) {
try {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("https:
context.startActivity(intent);
} catch (Exception e1) {
Toast.makeText(context, R.string.egab_can_not_open, Toast.LENGTH_SHORT).show();
}
}
}
public static void startTwitter(Activity context, String name) {
try {
context.getPackageManager().getPackageInfo("com.twitter.android", 0);
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("twitter://user?screen_name=" + name));
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
} catch (Exception e) {
try {
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://twitter.com/" + name));
context.startActivity(intent);
} catch (Exception e1) {
Toast.makeText(context, R.string.egab_can_not_open, Toast.LENGTH_SHORT).show();
}
}
}
public void openApp(AboutConfig.BuildType buildType, String packageName) {
String appURI = null;
String webURI = null;
switch (buildType) {
case GOOGLE:
appURI = "market://details?id=" + packageName;
webURI = "http://play.google.com/store/apps/details?id=" + packageName;
break;
case AMAZON:
appURI = "amzn://apps/android?p=" + packageName;
webURI = "http:
break;
default:
//nothing
}
openApplication(appURI, webURI);
}
public void openPublisher(AboutConfig.BuildType buildType, String publisher, String packageName) {
String appURI = null;
String webURI = null;
switch (buildType) {
case GOOGLE:
appURI = "market://search?q=pub:" + publisher;
webURI = "http://play.google.com/store/search?q=pub:" + publisher;
break;
case AMAZON:
appURI = "amzn://apps/android?showAll=1&p=" + packageName;
webURI = "http:
break;
default:
//nothing
}
openApplication(appURI, webURI);
}
private void openApplication(String appURI, String webURI) {
try {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(appURI)));
} catch (ActivityNotFoundException e1) {
try {
openHTMLPage(webURI);
} catch (ActivityNotFoundException e2) {
Toast.makeText(this, R.string.egab_can_not_open, Toast.LENGTH_SHORT).show();
}
}
}
private void openHTMLPage(String htmlPath) {
startActivity(new Intent(Intent.ACTION_VIEW, Uri.parse(htmlPath)));
}
private void logUIEventName(IAnalytic analytics, String eventType, String eventValue) {
if (analytics != null) {
analytics.logUiEvent(eventType, eventValue);
}
}
}
|
package com.ironz.binaryprefs.lock;
import com.ironz.binaryprefs.file.directory.DirectoryProvider;
import java.io.File;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
 * Simple lock factory providing a per-preference-name in-process
 * {@link ReadWriteLock} (shared across all instances via a static map) and a
 * per-instance cross-process file lock.
 * <p>
 * Thread-safety: lock registration uses atomic {@code putIfAbsent} so that two
 * factories created concurrently for the same name always end up sharing the
 * same {@link ReadWriteLock}. (The previous containsKey/put sequence was a
 * check-then-act race that could install two different locks for one name,
 * silently breaking mutual exclusion.)
 */
public final class SimpleLockFactoryImpl implements LockFactory {

    private static final String LOCK_EXTENSION = ".lock";

    // Shared across all factory instances: one fair read/write lock per preference name.
    private static final Map<String, ReadWriteLock> locks = new ConcurrentHashMap<>();

    // Per-instance: one cross-process file lock per preference name.
    private final Map<String, Lock> processLocks = new ConcurrentHashMap<>();

    private final String name;
    private final File lockDirectory;

    public SimpleLockFactoryImpl(String name, DirectoryProvider directoryProvider) {
        this.name = name;
        this.lockDirectory = directoryProvider.getLockDirectory();
        init(name);
    }

    /** Registers both the in-process and the cross-process lock for {@code name}. */
    private void init(String name) {
        initLocalLocks(name);
        initProcessLocks(name);
    }

    private void initLocalLocks(String name) {
        // putIfAbsent is atomic on ConcurrentHashMap; an existing lock is never replaced.
        locks.putIfAbsent(name, new ReentrantReadWriteLock(true));
    }

    private void initProcessLocks(String name) {
        File lockFile = new File(lockDirectory, name + LOCK_EXTENSION);
        processLocks.putIfAbsent(name, new ProcessFileLock(lockFile));
    }

    @Override
    public Lock getReadLock() {
        ReadWriteLock readWriteLock = locks.get(name);
        return readWriteLock.readLock();
    }

    @Override
    public Lock getWriteLock() {
        ReadWriteLock readWriteLock = locks.get(name);
        return readWriteLock.writeLock();
    }

    @Override
    public Lock getProcessLock() {
        return processLocks.get(name);
    }
}
|
package com.matthewtamlin.spyglass.library.core;
import android.content.Context;
import android.content.res.TypedArray;
import android.os.Looper;
import android.util.AttributeSet;
import android.view.View;
import com.matthewtamlin.spyglass.library.default_adapters.DefaultAdapter;
import com.matthewtamlin.spyglass.library.handler_adapters.HandlerAdapter;
import com.matthewtamlin.spyglass.library.handler_adapters.HandlerAdapter.TypedArrayAccessor;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.Arrays;
import static com.matthewtamlin.java_utilities.checkers.NullChecker.checkNotNull;
import static com.matthewtamlin.spyglass.library.util.AdapterUtil.getDefaultAdapter;
import static com.matthewtamlin.spyglass.library.util.AdapterUtil.getHandlerAdapter;
import static com.matthewtamlin.spyglass.library.util.AnnotationUtil.getDefaultAnnotation;
import static com.matthewtamlin.spyglass.library.util.AnnotationUtil.getHandlerAnnotation;
import static com.matthewtamlin.spyglass.library.util.ValidationUtil.validateField;
import static com.matthewtamlin.spyglass.library.util.ValidationUtil.validateMethod;
/**
 * Binds styled-attribute data from a view's {@link TypedArray} to the view's
 * annotated fields and methods. Must only be used on the UI thread.
 */
public class Spyglass {
    private View view;

    private Context context;

    private TypedArray attrSource;

    private Spyglass(final Builder builder) {
        this.view = builder.view;
        this.context = builder.context;
        this.attrSource = view.getContext().obtainStyledAttributes(
                builder.attributeSet,
                builder.styleableRes,
                builder.defStyleAttr,
                builder.defStyleRes);
    }

    /**
     * Validates every declared field of the view and binds attribute (or
     * default) data to the annotated ones.
     *
     * @throws IllegalThreadException if called off the UI thread
     */
    public void bindDataToFields() {
        checkMainThread();

        for (final Field f : view.getClass().getDeclaredFields()) {
            validateField(f);
            processField(f);
        }
    }

    /**
     * Validates every declared method of the view and invokes the annotated
     * ones with attribute (or default) data.
     *
     * @throws IllegalThreadException if called off the UI thread
     */
    public void passDataToMethods() {
        checkMainThread();

        for (final Method m : view.getClass().getDeclaredMethods()) {
            validateMethod(m);
            processMethod(m);
        }
    }

    /** Throws if the caller is not on the main (UI) thread. */
    private void checkMainThread() {
        if (Looper.myLooper() != Looper.getMainLooper()) {
            throw new IllegalThreadException("Spyglasses must only be touched by the UI thread.");
        }
    }

    /**
     * Binds a value to {@code field}: the attribute value if present, otherwise
     * the declared default; throws if the attribute is mandatory and absent.
     */
    private void processField(final Field field) {
        field.setAccessible(true);

        final Annotation handlerAnnotation = getHandlerAnnotation(field);

        if (handlerAnnotation != null) {
            final HandlerAdapter<?, Annotation> handlerAdapter = getHandlerAdapter(field);
            final TypedArrayAccessor<?> accessor = handlerAdapter.getAccessor(handlerAnnotation);

            if (accessor.valueExistsInArray(attrSource)) {
                bindDataToField(field, accessor.getValueFromArray(attrSource));
            } else if (getDefaultAnnotation(field) != null) {
                final DefaultAdapter<?, Annotation> defaultAdapter = getDefaultAdapter(field);

                bindDataToField(
                        field,
                        defaultAdapter.getDefault(getDefaultAnnotation(field), context));
            } else {
                final String message = "Missing mandatory attribute %1$s in view %2$s.";
                final int resId = handlerAdapter.getAttributeId(handlerAnnotation);
                final String resIdName = context.getResources().getResourceEntryName(resId);

                throw new MandatoryAttributeMissingException(
                        String.format(message, resIdName, view));
            }
        }
    }

    /**
     * Invokes {@code method}: with the attribute value if present, otherwise
     * with the declared default; throws if the attribute is mandatory and absent.
     * <p>
     * Fixed: the previous version computed the value/default and then discarded
     * it at "// call method" placeholders, and never threw for a missing
     * mandatory attribute.
     */
    private void processMethod(final Method method) {
        method.setAccessible(true);

        final Annotation handlerAnnotation = getHandlerAnnotation(method);

        if (handlerAnnotation != null) {
            final HandlerAdapter<?, Annotation> handlerAdapter = getHandlerAdapter(method);

            if (handlerAdapter.attributeValueIsAvailable(attrSource, handlerAnnotation)) {
                final Object value = handlerAdapter.getAttributeValue(
                        attrSource,
                        handlerAnnotation);

                callMethod(method, new Object[]{value});
            } else {
                final Annotation defaultAnnotation = getDefaultAnnotation(method);

                if (defaultAnnotation != null) {
                    final DefaultAdapter<?, Annotation> defaultAdapter = getDefaultAdapter(method);

                    // Use the same context as processField for consistency.
                    final Object defaultValue = defaultAdapter.getDefault(
                            defaultAnnotation,
                            context);

                    callMethod(method, new Object[]{defaultValue});
                } else if (handlerAdapter.attributeIsMandatory(handlerAnnotation)) {
                    // Mirror processField's behaviour for missing mandatory attributes.
                    final String message = "Missing mandatory attribute %1$s in view %2$s.";
                    final int resId = handlerAdapter.getAttributeId(handlerAnnotation);
                    final String resIdName = context.getResources().getResourceEntryName(resId);

                    throw new MandatoryAttributeMissingException(
                            String.format(message, resIdName, view));
                }
            }
        }
    }

    /**
     * Reflectively sets {@code value} on {@code field} of the view.
     *
     * @throws SpyglassFieldBindException wrapping any reflective failure
     */
    private void bindDataToField(final Field field, final Object value) {
        try {
            field.set(view, value);
        } catch (final Exception e) {
            final String message = "Failed to bind data to field %1$s.";
            // Fixed: the field (not the value) is what the message describes.
            throw new SpyglassFieldBindException(String.format(message, field), e);
        }
    }

    /**
     * Reflectively invokes {@code method} on the view with {@code arguments}.
     *
     * @throws SpyglassMethodCallException wrapping any reflective failure
     */
    private void callMethod(final Method method, Object[] arguments) {
        try {
            method.invoke(view, arguments);
        } catch (final Exception e) {
            final String message = "Failed to call method %1$s with arguments %2$s.";
            // Fixed: %1$s was previously filled with the message itself instead of the method.
            throw new SpyglassMethodCallException(
                    String.format(message, method, Arrays.toString(arguments)),
                    e);
        }
    }

    /** @return a new builder for configuring a Spyglass */
    public static Builder builder() {
        return new Builder();
    }

    /** Fluent-less builder; view, context and styleable resource are mandatory. */
    public static class Builder {
        private View view;

        private Context context;

        private int styleableRes[];

        private AttributeSet attributeSet;

        private int defStyleAttr;

        private int defStyleRes;

        private Builder() {}

        public void forView(final View view) {
            this.view = view;
        }

        public void withContext(final Context context) {
            this.context = context;
        }

        public void withStyleableResource(final int[] styleableRes) {
            this.styleableRes = styleableRes;
        }

        public void withAttributeSet(final AttributeSet attributeSet) {
            this.attributeSet = attributeSet;
        }

        public void withDefStyleAttr(final int defStyleAttr) {
            this.defStyleAttr = defStyleAttr;
        }

        public void withDefStyleRes(final int defStyleRes) {
            this.defStyleRes = defStyleRes;
        }

        /**
         * Builds the Spyglass.
         *
         * @throws InvalidBuilderStateException if view, context or styleable resource is unset
         */
        public Spyglass build() {
            checkNotNull(view, new InvalidBuilderStateException("Unable to build a Spyglass " +
                    "without a view. Call method forView(View) before calling build()."));

            checkNotNull(context, new InvalidBuilderStateException("Unable to build a Spyglass " +
                    "without a context. Call method withContext(Context) before calling build()."));

            checkNotNull(styleableRes, new InvalidBuilderStateException("Unable to build a " +
                    "Spyglass without a styleable resource. Call method withStyleableRes(int[]) " +
                    "before calling build()."));

            return new Spyglass(this);
        }
    }
}
|
package com.mikepenz.materialdrawer;
import android.app.Activity;
import android.content.Context;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.annotation.ColorInt;
import android.support.annotation.ColorRes;
import android.support.annotation.DimenRes;
import android.support.annotation.DrawableRes;
import android.support.annotation.LayoutRes;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;
import com.mikepenz.iconics.IconicsDrawable;
import com.mikepenz.materialdrawer.holder.ColorHolder;
import com.mikepenz.materialdrawer.holder.DimenHolder;
import com.mikepenz.materialdrawer.holder.ImageHolder;
import com.mikepenz.materialdrawer.holder.StringHolder;
import com.mikepenz.materialdrawer.icons.MaterialDrawerFont;
import com.mikepenz.materialdrawer.model.interfaces.IDrawerItem;
import com.mikepenz.materialdrawer.model.interfaces.IProfile;
import com.mikepenz.materialdrawer.util.DrawerImageLoader;
import com.mikepenz.materialdrawer.util.DrawerUIUtils;
import com.mikepenz.materialdrawer.util.IdDistributor;
import com.mikepenz.materialdrawer.view.BezelImageView;
import com.mikepenz.materialize.util.UIUtils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Stack;
public class AccountHeaderBuilder {
// global references to views we need later
protected View mAccountHeader;
protected ImageView mAccountHeaderBackground;
protected BezelImageView mCurrentProfileView;
protected View mAccountHeaderTextSection;
protected ImageView mAccountSwitcherArrow;
protected TextView mCurrentProfileName;
protected TextView mCurrentProfileEmail;
protected BezelImageView mProfileFirstView;
protected BezelImageView mProfileSecondView;
protected BezelImageView mProfileThirdView;
// global references to the profiles
protected IProfile mCurrentProfile;
protected IProfile mProfileFirst;
protected IProfile mProfileSecond;
protected IProfile mProfileThird;
// global stuff
protected boolean mSelectionListShown = false;
protected int mAccountHeaderTextSectionBackgroundResource = -1;
// the activity to use
protected Activity mActivity;
/**
* Pass the activity you use the drawer in ;)
*
* @param activity
* @return
*/
public AccountHeaderBuilder withActivity(@NonNull Activity activity) {
this.mActivity = activity;
return this;
}
// defines if we use the compactStyle
protected boolean mCompactStyle = false;
/**
* Defines if we should use the compact style for the header.
*
* @param compactStyle
* @return
*/
public AccountHeaderBuilder withCompactStyle(boolean compactStyle) {
this.mCompactStyle = compactStyle;
return this;
}
// the typeface used for textViews within the AccountHeader
protected Typeface mTypeface;
// the typeface used for name textView only. overrides mTypeface
protected Typeface mNameTypeface;
// the typeface used for email textView only. overrides mTypeface
protected Typeface mEmailTypeface;
/**
* Define the typeface which will be used for all textViews in the AccountHeader
*
* @param typeface
* @return
*/
public AccountHeaderBuilder withTypeface(@NonNull Typeface typeface) {
this.mTypeface = typeface;
return this;
}
/**
* Define the typeface which will be used for name textView in the AccountHeader.
* Overrides typeface supplied to {@link AccountHeaderBuilder#withTypeface(android.graphics.Typeface)}
*
* @param typeface
* @return
* @see #withTypeface(android.graphics.Typeface)
*/
public AccountHeaderBuilder withNameTypeface(@NonNull Typeface typeface) {
this.mNameTypeface = typeface;
return this;
}
/**
* Define the typeface which will be used for email textView in the AccountHeader.
* Overrides typeface supplied to {@link AccountHeaderBuilder#withTypeface(android.graphics.Typeface)}
*
* @param typeface
* @return
* @see #withTypeface(android.graphics.Typeface)
*/
public AccountHeaderBuilder withEmailTypeface(@NonNull Typeface typeface) {
this.mEmailTypeface = typeface;
return this;
}
// set the account header height
protected DimenHolder mHeight;
/**
* set the height for the header
*
* @param heightPx
* @return
*/
public AccountHeaderBuilder withHeightPx(int heightPx) {
this.mHeight = DimenHolder.fromPixel(heightPx);
return this;
}
/**
* set the height for the header
*
* @param heightDp
* @return
*/
public AccountHeaderBuilder withHeightDp(int heightDp) {
this.mHeight = DimenHolder.fromDp(heightDp);
return this;
}
/**
* set the height for the header by resource
*
* @param heightRes
* @return
*/
public AccountHeaderBuilder withHeightRes(@DimenRes int heightRes) {
this.mHeight = DimenHolder.fromResource(heightRes);
return this;
}
//the background color for the slider
protected ColorHolder mTextColor;
/**
* set the background for the slider as color
*
* @param textColor
* @return
*/
public AccountHeaderBuilder withTextColor(@ColorInt int textColor) {
this.mTextColor = ColorHolder.fromColor(textColor);
return this;
}
/**
* set the background for the slider as resource
*
* @param textColorRes
* @return
*/
public AccountHeaderBuilder withTextColorRes(@ColorRes int textColorRes) {
this.mTextColor = ColorHolder.fromColorRes(textColorRes);
return this;
}
//the current selected profile is visible in the list
protected boolean mCurrentHiddenInList = false;
/**
* hide the current selected profile from the list
*
* @param currentProfileHiddenInList
* @return
*/
public AccountHeaderBuilder withCurrentProfileHiddenInList(boolean currentProfileHiddenInList) {
mCurrentHiddenInList = currentProfileHiddenInList;
return this;
}
//set to hide the first or second line
protected boolean mSelectionFirstLineShown = true;
protected boolean mSelectionSecondLineShown = true;
/**
* set this to false if you want to hide the first line of the selection box in the header (first line would be the name)
*
* @param selectionFirstLineShown
* @return
* @deprecated replaced by {@link #withSelectionFirstLineShown}
*/
@Deprecated
public AccountHeaderBuilder withSelectionFistLineShown(boolean selectionFirstLineShown) {
this.mSelectionFirstLineShown = selectionFirstLineShown;
return this;
}
/**
* set this to false if you want to hide the first line of the selection box in the header (first line would be the name)
*
* @param selectionFirstLineShown
* @return
*/
public AccountHeaderBuilder withSelectionFirstLineShown(boolean selectionFirstLineShown) {
this.mSelectionFirstLineShown = selectionFirstLineShown;
return this;
}
/**
* set this to false if you want to hide the second line of the selection box in the header (second line would be the e-mail)
*
* @param selectionSecondLineShown
* @return
*/
public AccountHeaderBuilder withSelectionSecondLineShown(boolean selectionSecondLineShown) {
this.mSelectionSecondLineShown = selectionSecondLineShown;
return this;
}
//set one of these to define the text in the first or second line with in the account selector
protected String mSelectionFirstLine;
protected String mSelectionSecondLine;
/**
* set this to define the first line in the selection area if there is no profile
* note this will block any values from profiles!
*
* @param selectionFirstLine
* @return
*/
public AccountHeaderBuilder withSelectionFirstLine(String selectionFirstLine) {
this.mSelectionFirstLine = selectionFirstLine;
return this;
}
/**
* set this to define the second line in the selection area if there is no profile
* note this will block any values from profiles!
*
* @param selectionSecondLine
* @return
*/
public AccountHeaderBuilder withSelectionSecondLine(String selectionSecondLine) {
this.mSelectionSecondLine = selectionSecondLine;
return this;
}
// set no divider below the header
protected boolean mPaddingBelowHeader = true;
/**
* Set this to false if you want no padding below the Header
*
* @param paddingBelowHeader
* @return
*/
public AccountHeaderBuilder withPaddingBelowHeader(boolean paddingBelowHeader) {
this.mPaddingBelowHeader = paddingBelowHeader;
return this;
}
// whether a divider is drawn below the header (default: true)
protected boolean mDividerBelowHeader = true;

/**
 * Controls the divider below the header.
 *
 * @param enabled false to remove the divider below the header
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withDividerBelowHeader(boolean enabled) {
    mDividerBelowHeader = enabled;
    return this;
}
// whether the layout compensates for a translucent status bar (default: true)
protected boolean mTranslucentStatusBar = true;

/**
 * Enables / disables the translucent-statusBar handling of the header.
 *
 * @param translucent true if the activity uses a translucent statusBar
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withTranslucentStatusBar(boolean translucent) {
    mTranslucentStatusBar = translucent;
    return this;
}
// the background image shown behind the header
protected ImageHolder mHeaderBackground;

/**
 * Sets the background of the header from a Drawable.
 *
 * @param background the drawable used as header background
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withHeaderBackground(Drawable background) {
    mHeaderBackground = new ImageHolder(background);
    return this;
}
/**
 * Sets the background of the header from a drawable resource.
 *
 * @param backgroundRes resource id of the header background
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withHeaderBackground(@DrawableRes int backgroundRes) {
    mHeaderBackground = new ImageHolder(backgroundRes);
    return this;
}
/**
 * Sets the background of the header via an already built {@code ImageHolder}.
 *
 * @param background the ImageHolder wrapping the header background
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withHeaderBackground(ImageHolder background) {
    mHeaderBackground = background;
    return this;
}
// optional scale type applied to the header background image (null = view default)
protected ImageView.ScaleType mHeaderBackgroundScaleType = null;

/**
 * Defines the {@code ScaleType} used for the header background image.
 *
 * @param scaleType the scale type to apply
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withHeaderBackgroundScaleType(ImageView.ScaleType scaleType) {
    mHeaderBackgroundScaleType = scaleType;
    return this;
}
// whether profile images are shown in the header (default: true)
protected boolean mProfileImagesVisible = true;

/**
 * Controls whether the profile images in the header are shown.
 *
 * @param visible false to hide all profile images
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withProfileImagesVisible(boolean visible) {
    mProfileImagesVisible = visible;
    return this;
}
// show only the main (currently selected) profile image (default: false)
protected boolean mOnlyMainProfileImageVisible = false;

/**
 * Controls whether only the main (currently selected) profile image is visible.
 *
 * @param onlyMain true to hide the small secondary profile images
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withOnlyMainProfileImageVisible(boolean onlyMain) {
    mOnlyMainProfileImageVisible = onlyMain;
    return this;
}
// tri-state: null = default behaviour; true/false = explicit override for closing
// the drawer after a profile was clicked in the selection list
protected Boolean mCloseDrawerOnProfileListClick = null;

/**
 * Defines whether the drawer closes when the user clicks a profile item while the
 * selection list is shown.
 *
 * @param close true to close the drawer on a profile-list click
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withCloseDrawerOnProfileListClick(boolean close) {
    mCloseDrawerOnProfileListClick = close;
    return this;
}
// reset the drawer list back to the main drawer list after a profile-list click (default: true)
protected boolean mResetDrawerOnProfileListClick = true;

/**
 * Defines whether the drawer selection list is reset after the user clicks a profile
 * item while the selection list is shown.
 *
 * @param reset false to keep the selection list open after a click
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withResetDrawerOnProfileListClick(boolean reset) {
    mResetDrawerOnProfileListClick = reset;
    return this;
}
// whether the profile images react to clicks (default: true)
protected boolean mProfileImagesClickable = true;

/**
 * Enables or disables clicks on the profile images.
 *
 * @param clickable false to make profile images non-interactive
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withProfileImagesClickable(boolean clickable) {
    mProfileImagesClickable = clickable;
    return this;
}
// use the alternative profile-switching behaviour (swap positions instead of rotating)
protected boolean mAlternativeProfileHeaderSwitching = false;

/**
 * Enables the alternative profile header switching behaviour.
 *
 * @param alternativeSwitching true to enable the alternative switching
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withAlternativeProfileHeaderSwitching(boolean alternativeSwitching) {
    mAlternativeProfileHeaderSwitching = alternativeSwitching;
    return this;
}
// show three small profile previews instead of two (default: false)
protected boolean mThreeSmallProfileImages = false;

/**
 * Enables the extended profile icon view with 3 small header images instead of two.
 *
 * @param threeImages true to show three small profile images
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withThreeSmallProfileImages(boolean threeImages) {
    mThreeSmallProfileImages = threeImages;
    return this;
}
// delay in ms before the drawer is closed after a profile click (default: 100; <= 0 disables the delayed close)
protected int mOnProfileClickDrawerCloseDelay = 100;

/**
 * Defines the delay for the drawer-close operation after a profile click. Delaying the
 * close is a small trick to improve perceived speed (and remove lag) if a new activity
 * is opened after a DrawerItem was selected.
 * NOTE: pass -1 to disable the delayed close.
 *
 * @param delayMs the delay in ms (-1 to disable)
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withOnProfileClickDrawerCloseDelay(int delayMs) {
    mOnProfileClickDrawerCloseDelay = delayMs;
    return this;
}
// listener notified for clicks / long-clicks on the header profile images
protected AccountHeader.OnAccountHeaderProfileImageListener mOnAccountHeaderProfileImageListener;

/**
 * Sets the click / longClick listener for the header profile images.
 *
 * @param listener the profile-image listener to notify
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withOnAccountHeaderProfileImageListener(AccountHeader.OnAccountHeaderProfileImageListener listener) {
    mOnAccountHeaderProfileImageListener = listener;
    return this;
}
// listener notified when the selection (text) view of the header is clicked
protected AccountHeader.OnAccountHeaderSelectionViewClickListener mOnAccountHeaderSelectionViewClickListener;

/**
 * Sets an onClick listener for the selection box.
 *
 * @param listener the selection-view click listener to notify
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withOnAccountHeaderSelectionViewClickListener(AccountHeader.OnAccountHeaderSelectionViewClickListener listener) {
    mOnAccountHeaderSelectionViewClickListener = listener;
    return this;
}
// whether the selection list stays enabled when only a single profile exists (default: true)
protected boolean mSelectionListEnabledForSingleProfile = true;

/**
 * Enables or disables the selection list when only a single profile exists.
 *
 * @param enabled false to disable the list for a single profile
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withSelectionListEnabledForSingleProfile(boolean enabled) {
    mSelectionListEnabledForSingleProfile = enabled;
    return this;
}
// whether the selection list can be opened at all (default: true)
protected boolean mSelectionListEnabled = true;

/**
 * Enables or disables the selection list.
 *
 * @param enabled false to disable the selection list completely
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withSelectionListEnabled(boolean enabled) {
    mSelectionListEnabled = enabled;
    return this;
}
// the header container view that everything is built into
protected View mAccountHeaderContainer;

/**
 * Supplies a custom view for the account header. NOTE: it must follow the same
 * structure as the library's header layout.
 *
 * @param accountHeader the custom header view
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withAccountHeader(@NonNull View accountHeader) {
    mAccountHeaderContainer = accountHeader;
    return this;
}
/**
 * Supplies a custom layout resource for the account header; see the header layouts of
 * this lib on GitHub for the required structure. Passing -1 selects the built-in
 * layout (compact or normal, depending on the compact-style setting).
 *
 * @param resLayout the layout resource to inflate, or -1 for the default
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withAccountHeader(@LayoutRes int resLayout) {
    if (mActivity == null) {
        throw new RuntimeException("please pass an activity first to use this call");
    }
    // resolve which layout to inflate, then inflate exactly once
    final int layout;
    if (resLayout != -1) {
        layout = resLayout;
    } else if (mCompactStyle) {
        layout = R.layout.material_drawer_compact_header;
    } else {
        layout = R.layout.material_drawer_header;
    }
    mAccountHeaderContainer = mActivity.getLayoutInflater().inflate(layout, null, false);
    return this;
}
// the profiles displayed in the header / selection list
protected ArrayList<IProfile> mProfiles;

/**
 * Sets the list of profiles for the header; ids are checked / distributed first.
 *
 * @param profiles the profiles to display
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withProfiles(@NonNull ArrayList<IProfile> profiles) {
    mProfiles = IdDistributor.checkIds(profiles);
    return this;
}
/**
 * Adds one or more profiles to the header, creating the backing list lazily;
 * ids are checked / distributed first.
 *
 * @param profiles the profiles to append
 * @return this builder, for chaining
 */
public AccountHeaderBuilder addProfiles(@NonNull IProfile... profiles) {
    if (mProfiles == null) {
        mProfiles = new ArrayList<>();
    }
    Collections.addAll(mProfiles, IdDistributor.checkIds(profiles));
    return this;
}
// listener fired when a profile or the selection is clicked
protected AccountHeader.OnAccountHeaderListener mOnAccountHeaderListener;

/**
 * Adds a listener that is notified when the active profile changes.
 *
 * @param listener the accountHeader listener to notify
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withOnAccountHeaderListener(AccountHeader.OnAccountHeaderListener listener) {
    mOnAccountHeaderListener = listener;
    return this;
}
// listener fired on a long-click of a profile inside the selection list
protected AccountHeader.OnAccountHeaderItemLongClickListener mOnAccountHeaderItemLongClickListener;

/**
 * Sets the long-click listener fired for profiles inside the selection list.
 *
 * @param listener the item long-click listener to notify
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withOnAccountHeaderItemLongClickListener(AccountHeader.OnAccountHeaderItemLongClickListener listener) {
    mOnAccountHeaderItemLongClickListener = listener;
    return this;
}
// the Drawer this account switcher is attached to
protected Drawer mDrawer;

/**
 * Attaches the Drawer that this account header controls.
 *
 * @param drawer the drawer instance
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withDrawer(@NonNull Drawer drawer) {
    mDrawer = drawer;
    return this;
}
// savedInstanceState bundle used to restore the previous selection
protected Bundle mSavedInstance;

/**
 * Restores the header state (e.g. the selected profile) from a savedInstance bundle.
 *
 * @param savedInstance the bundle to restore from (may be null)
 * @return this builder, for chaining
 */
public AccountHeaderBuilder withSavedInstance(Bundle savedInstance) {
    mSavedInstance = savedInstance;
    return this;
}
/**
 * Applies the given pixel height to the header container and to its inner header and
 * background views (looked up by id inside the container).
 *
 * @param height the header height in pixels
 */
private void setHeaderHeight(int height) {
    if (mAccountHeaderContainer != null) {
        applyHeight(mAccountHeaderContainer, height);
        applyHeight(mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header), height);
        applyHeight(mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header_background), height);
    }
}

/**
 * Sets the LayoutParams height of a single view. Null-safe for both the view and its
 * LayoutParams (the original code only guarded the container's params; an inner view
 * without LayoutParams would have thrown an NPE).
 *
 * @param view   the view to resize (may be null)
 * @param height the height in pixels
 */
private void applyHeight(View view, int height) {
    if (view != null) {
        ViewGroup.LayoutParams params = view.getLayoutParams();
        if (params != null) {
            params.height = height;
            view.setLayoutParams(params);
        }
    }
}
/**
 * a small helper to handle the selectionView.
 * On API >= 21 the whole header container becomes the clickable surface (using its
 * foreground for the pressed state); below that only the text section is clickable.
 *
 * @param profile the profile attached as tag to the clickable view (only used when on == true)
 * @param on      true to enable the selection click handling, false to clear listeners and background
 */
private void handleSelectionView(IProfile profile, boolean on) {
if (on) {
if (Build.VERSION.SDK_INT >= 21) {
// Lollipop+: ripple via the container foreground, click on the whole header
((FrameLayout) mAccountHeaderContainer).setForeground(UIUtils.getCompatDrawable(mAccountHeaderContainer.getContext(), mAccountHeaderTextSectionBackgroundResource));
mAccountHeaderContainer.setOnClickListener(onSelectionClickListener);
mAccountHeaderContainer.setTag(R.id.material_drawer_profile_header, profile);
} else {
// pre-21: selectable background and click only on the text section
mAccountHeaderTextSection.setBackgroundResource(mAccountHeaderTextSectionBackgroundResource);
mAccountHeaderTextSection.setOnClickListener(onSelectionClickListener);
mAccountHeaderTextSection.setTag(R.id.material_drawer_profile_header, profile);
}
} else {
if (Build.VERSION.SDK_INT >= 21) {
((FrameLayout) mAccountHeaderContainer).setForeground(null);
mAccountHeaderContainer.setOnClickListener(null);
} else {
UIUtils.setBackground(mAccountHeaderTextSection, null);
mAccountHeaderTextSection.setOnClickListener(null);
}
}
}
/**
 * method to build the header view: inflates the default layout if none was supplied,
 * computes the header height (respecting compact style and translucent statusBar),
 * wires up all sub-views and listeners, restores a saved selection and finally hands
 * the header over to the attached Drawer.
 *
 * @return the built AccountHeader wrapping this builder
 */
public AccountHeader build() {
// if the user has not set a accountHeader use the default one :D
if (mAccountHeaderContainer == null) {
withAccountHeader(-1);
}
// get the header view within the container
mAccountHeader = mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header);
//the default min header height by default 148dp
int defaultHeaderMinHeight = mActivity.getResources().getDimensionPixelSize(R.dimen.material_drawer_account_header_height);
int statusBarHeight = UIUtils.getStatusBarHeight(mActivity, true);
// handle the height for the header: explicit height wins, else compact dimen, else 16:9-ish ratio
int height;
if (mHeight != null) {
height = mHeight.asPixel(mActivity);
} else {
if (mCompactStyle) {
height = mActivity.getResources().getDimensionPixelSize(R.dimen.material_drawer_account_header_height_compact);
} else {
//calculate the header height by getting the optimal drawer width and calculating it * 9 / 16
height = (int) (DrawerUIUtils.getOptimalDrawerWidth(mActivity) * AccountHeader.NAVIGATION_DRAWER_ACCOUNT_ASPECT_RATIO);
//if we are lower than api 19 (>= 19 we have a translucentStatusBar) the height should be a bit lower
//probably even if we are non translucent on > 19 devices?
if (Build.VERSION.SDK_INT < 19) {
int tempHeight = height - statusBarHeight;
//if we are lower than api 19 we are not able to have a translucent statusBar so we remove the height of the statusBar from the padding
//to prevent display issues we only reduce the height if we still fit the required minHeight of 148dp (R.dimen.material_drawer_account_header_height)
//we remove additional 8dp from the defaultMinHeaderHeight as there is some buffer in the header and to prevent to large spacings
if (tempHeight > defaultHeaderMinHeight - UIUtils.convertDpToPixel(8, mActivity)) {
height = tempHeight;
}
}
}
}
// handle everything if we have a translucent status bar which only is possible on API >= 19
if (mTranslucentStatusBar && Build.VERSION.SDK_INT >= 19) {
mAccountHeader.setPadding(mAccountHeader.getPaddingLeft(), mAccountHeader.getPaddingTop() + statusBarHeight, mAccountHeader.getPaddingRight(), mAccountHeader.getPaddingBottom());
//in fact it makes no difference if we have a translucent statusBar or not. we want 9/16 just if we are not compact
if (mCompactStyle) {
height = height + statusBarHeight;
} else if ((height - statusBarHeight) <= defaultHeaderMinHeight) {
//if the height + statusBar of the header is lower than the required 148dp + statusBar we change the height to be able to display all the data
height = defaultHeaderMinHeight + statusBarHeight;
}
}
//set the height for the header
setHeaderHeight(height);
// get the background view
mAccountHeaderBackground = (ImageView) mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header_background);
// set the background
ImageHolder.applyTo(mHeaderBackground, mAccountHeaderBackground, DrawerImageLoader.Tags.ACCOUNT_HEADER.name());
if (mHeaderBackgroundScaleType != null) {
mAccountHeaderBackground.setScaleType(mHeaderBackgroundScaleType);
}
// get the text color to use for the text section
int textColor = ColorHolder.color(mTextColor, mActivity, R.attr.material_drawer_header_selection_text, R.color.material_drawer_header_selection_text);
// set the background for the section; in compact style the whole header doubles as the text section
if (mCompactStyle) {
mAccountHeaderTextSection = mAccountHeader;
} else {
mAccountHeaderTextSection = mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header_text_section);
}
mAccountHeaderTextSectionBackgroundResource = DrawerUIUtils.getSelectableBackground(mActivity);
handleSelectionView(mCurrentProfile, true);
// set the arrow :D
mAccountSwitcherArrow = (ImageView) mAccountHeaderContainer.findViewById(R.id.material_drawer_account_header_text_switcher);
mAccountSwitcherArrow.setImageDrawable(new IconicsDrawable(mActivity, MaterialDrawerFont.Icon.mdf_arrow_drop_down).sizeRes(R.dimen.material_drawer_account_header_dropdown).paddingRes(R.dimen.material_drawer_account_header_dropdown_padding).color(textColor));
//get the fields for the name
mCurrentProfileView = (BezelImageView) mAccountHeader.findViewById(R.id.material_drawer_account_header_current);
mCurrentProfileName = (TextView) mAccountHeader.findViewById(R.id.material_drawer_account_header_name);
mCurrentProfileEmail = (TextView) mAccountHeader.findViewById(R.id.material_drawer_account_header_email);
//set the typeface for the AccountHeader: specific typefaces win over the general one
if (mNameTypeface != null) {
mCurrentProfileName.setTypeface(mNameTypeface);
} else if (mTypeface != null) {
mCurrentProfileName.setTypeface(mTypeface);
}
if (mEmailTypeface != null) {
mCurrentProfileEmail.setTypeface(mEmailTypeface);
} else if (mTypeface != null) {
mCurrentProfileEmail.setTypeface(mTypeface);
}
mCurrentProfileName.setTextColor(textColor);
mCurrentProfileEmail.setTextColor(textColor);
mProfileFirstView = (BezelImageView) mAccountHeader.findViewById(R.id.material_drawer_account_header_small_first);
mProfileSecondView = (BezelImageView) mAccountHeader.findViewById(R.id.material_drawer_account_header_small_second);
mProfileThirdView = (BezelImageView) mAccountHeader.findViewById(R.id.material_drawer_account_header_small_third);
//calculate the profiles to set
calculateProfiles();
//process and build the profiles
buildProfiles();
// try to restore all saved values again
if (mSavedInstance != null) {
int selection = mSavedInstance.getInt(AccountHeader.BUNDLE_SELECTION_HEADER, -1);
if (selection != -1) {
//predefine selection (should be the first element
if (mProfiles != null && (selection) > -1 && selection < mProfiles.size()) {
switchProfiles(mProfiles.get(selection));
}
}
}
//everything created. now set the header
if (mDrawer != null) {
mDrawer.setHeader(mAccountHeaderContainer, mPaddingBelowHeader, mDividerBelowHeader);
}
//forget the reference to the activity to avoid leaking it past build()
mActivity = null;
return new AccountHeader(this);
}
/**
 * helper method to calculate the order of the profiles.
 * First run (no current profile yet): the first four selectable profiles fill the
 * current/first/second/third slots in list order. Later runs: profiles already
 * occupying an active slot keep their position (identity comparison), and any gaps
 * are filled with not-yet-used selectable profiles.
 */
protected void calculateProfiles() {
if (mProfiles == null) {
mProfiles = new ArrayList<>();
}
if (mCurrentProfile == null) {
// initial assignment: walk the list and fill the four slots with selectable profiles
int setCount = 0;
for (int i = 0; i < mProfiles.size(); i++) {
if (mProfiles.size() > i && mProfiles.get(i).isSelectable()) {
if (setCount == 0 && (mCurrentProfile == null)) {
mCurrentProfile = mProfiles.get(i);
} else if (setCount == 1 && (mProfileFirst == null)) {
mProfileFirst = mProfiles.get(i);
} else if (setCount == 2 && (mProfileSecond == null)) {
mProfileSecond = mProfiles.get(i);
} else if (setCount == 3 && (mProfileThird == null)) {
mProfileThird = mProfiles.get(i);
}
setCount++;
}
}
return;
}
IProfile[] previousActiveProfiles = new IProfile[]{
mCurrentProfile,
mProfileFirst,
mProfileSecond,
mProfileThird
};
IProfile[] newActiveProfiles = new IProfile[4];
Stack<IProfile> unusedProfiles = new Stack<>();
// try to keep existing active profiles in the same positions (identity compare)
for (int i = 0; i < mProfiles.size(); i++) {
IProfile p = mProfiles.get(i);
if (p.isSelectable()) {
boolean used = false;
for (int j = 0; j < 4; j++) {
if (previousActiveProfiles[j] == p) {
newActiveProfiles[j] = p;
used = true;
break;
}
}
if (!used) {
unusedProfiles.push(p);
}
}
}
Stack<IProfile> activeProfiles = new Stack<>();
// try to fill the gaps with new available profiles
for (int i = 0; i < 4; i++) {
if (newActiveProfiles[i] != null) {
activeProfiles.push(newActiveProfiles[i]);
} else if (!unusedProfiles.isEmpty()) {
activeProfiles.push(unusedProfiles.pop());
}
}
// reverse so the pops below come out in slot order (current, first, second, third)
Stack<IProfile> reversedActiveProfiles = new Stack<>();
while (!activeProfiles.empty()) {
reversedActiveProfiles.push(activeProfiles.pop());
}
// reassign active profiles; empty slots become null
if (reversedActiveProfiles.isEmpty()) {
mCurrentProfile = null;
} else {
mCurrentProfile = reversedActiveProfiles.pop();
}
if (reversedActiveProfiles.isEmpty()) {
mProfileFirst = null;
} else {
mProfileFirst = reversedActiveProfiles.pop();
}
if (reversedActiveProfiles.isEmpty()) {
mProfileSecond = null;
} else {
mProfileSecond = reversedActiveProfiles.pop();
}
if (reversedActiveProfiles.isEmpty()) {
mProfileThird = null;
} else {
mProfileThird = reversedActiveProfiles.pop();
}
}
/**
 * helper method to switch the profiles. Two strategies: with alternative switching the
 * new selection simply swaps places with the old current profile; otherwise the active
 * profiles are rotated so the new selection moves to the front. Rebuilds the header
 * views at the end.
 *
 * @param newSelection the profile to make current (no-op when null)
 * @return true if the new selection was already the current profile
 */
protected boolean switchProfiles(IProfile newSelection) {
if (newSelection == null) {
return false;
}
if (mCurrentProfile == newSelection) {
// already selected -> nothing to rebuild
return true;
}
if (mAlternativeProfileHeaderSwitching) {
// swap strategy: remember which small slot the new selection occupied...
int prevSelection = -1;
if (mProfileFirst == newSelection) {
prevSelection = 1;
} else if (mProfileSecond == newSelection) {
prevSelection = 2;
} else if (mProfileThird == newSelection) {
prevSelection = 3;
}
// ...then put the old current profile into that slot
IProfile tmp = mCurrentProfile;
mCurrentProfile = newSelection;
if (prevSelection == 1) {
mProfileFirst = tmp;
} else if (prevSelection == 2) {
mProfileSecond = tmp;
} else if (prevSelection == 3) {
mProfileThird = tmp;
}
} else {
if (mProfiles != null) {
ArrayList<IProfile> previousActiveProfiles = new ArrayList<>(Arrays.asList(mCurrentProfile, mProfileFirst, mProfileSecond, mProfileThird));
if (previousActiveProfiles.contains(newSelection)) {
// rotate strategy: move the selection to the front, shifting the others back
int position = -1;
for (int i = 0; i < 4; i++) {
if (previousActiveProfiles.get(i) == newSelection) {
position = i;
break;
}
}
if (position != -1) {
previousActiveProfiles.remove(position);
previousActiveProfiles.add(0, newSelection);
mCurrentProfile = previousActiveProfiles.get(0);
mProfileFirst = previousActiveProfiles.get(1);
mProfileSecond = previousActiveProfiles.get(2);
mProfileThird = previousActiveProfiles.get(3);
}
} else {
// selection was not active yet: push everything down one slot (third drops out)
mProfileThird = mProfileSecond;
mProfileSecond = mProfileFirst;
mProfileFirst = mCurrentProfile;
mCurrentProfile = newSelection;
}
}
}
buildProfiles();
return false;
}
/**
 * helper method to build the views for the ui: resets all profile views, then fills
 * them from the current/first/second/third profile slots, applies the fixed
 * first/second selection lines (which override profile values) and finally applies the
 * selection-list enabled/disabled rules.
 */
protected void buildProfiles() {
// reset everything to a clean state before applying the current slots
mCurrentProfileView.setVisibility(View.INVISIBLE);
mAccountHeaderTextSection.setVisibility(View.INVISIBLE);
mAccountSwitcherArrow.setVisibility(View.INVISIBLE);
mProfileFirstView.setVisibility(View.GONE);
mProfileFirstView.setOnClickListener(null);
mProfileSecondView.setVisibility(View.GONE);
mProfileSecondView.setOnClickListener(null);
mProfileThirdView.setVisibility(View.GONE);
mProfileThirdView.setOnClickListener(null);
mCurrentProfileName.setText("");
mCurrentProfileEmail.setText("");
handleSelectionView(mCurrentProfile, true);
if (mCurrentProfile != null) {
if (mProfileImagesVisible || mOnlyMainProfileImageVisible) {
setImageOrPlaceholder(mCurrentProfileView, mCurrentProfile.getIcon());
if (mProfileImagesClickable) {
mCurrentProfileView.setOnClickListener(onCurrentProfileClickListener);
mCurrentProfileView.setOnLongClickListener(onCurrentProfileLongClickListener);
mCurrentProfileView.disableTouchFeedback(false);
} else {
mCurrentProfileView.disableTouchFeedback(true);
}
mCurrentProfileView.setVisibility(View.VISIBLE);
mCurrentProfileView.invalidate();
} else if (mCompactStyle) {
mCurrentProfileView.setVisibility(View.GONE);
}
mAccountHeaderTextSection.setVisibility(View.VISIBLE);
handleSelectionView(mCurrentProfile, true);
mAccountSwitcherArrow.setVisibility(View.VISIBLE);
mCurrentProfileView.setTag(R.id.material_drawer_profile_header, mCurrentProfile);
StringHolder.applyTo(mCurrentProfile.getName(), mCurrentProfileName);
StringHolder.applyTo(mCurrentProfile.getEmail(), mCurrentProfileEmail);
// small profile images are only shown when profile images are visible and not main-only
if (mProfileFirst != null && mProfileImagesVisible && !mOnlyMainProfileImageVisible) {
setImageOrPlaceholder(mProfileFirstView, mProfileFirst.getIcon());
mProfileFirstView.setTag(R.id.material_drawer_profile_header, mProfileFirst);
if (mProfileImagesClickable) {
mProfileFirstView.setOnClickListener(onProfileClickListener);
mProfileFirstView.setOnLongClickListener(onProfileLongClickListener);
mProfileFirstView.disableTouchFeedback(false);
} else {
mProfileFirstView.disableTouchFeedback(true);
}
mProfileFirstView.setVisibility(View.VISIBLE);
mProfileFirstView.invalidate();
}
if (mProfileSecond != null && mProfileImagesVisible && !mOnlyMainProfileImageVisible) {
setImageOrPlaceholder(mProfileSecondView, mProfileSecond.getIcon());
mProfileSecondView.setTag(R.id.material_drawer_profile_header, mProfileSecond);
if (mProfileImagesClickable) {
mProfileSecondView.setOnClickListener(onProfileClickListener);
mProfileSecondView.setOnLongClickListener(onProfileLongClickListener);
mProfileSecondView.disableTouchFeedback(false);
} else {
mProfileSecondView.disableTouchFeedback(true);
}
mProfileSecondView.setVisibility(View.VISIBLE);
mProfileSecondView.invalidate();
}
// the third small image additionally requires the three-images option
if (mProfileThird != null && mThreeSmallProfileImages && mProfileImagesVisible && !mOnlyMainProfileImageVisible) {
setImageOrPlaceholder(mProfileThirdView, mProfileThird.getIcon());
mProfileThirdView.setTag(R.id.material_drawer_profile_header, mProfileThird);
if (mProfileImagesClickable) {
mProfileThirdView.setOnClickListener(onProfileClickListener);
mProfileThirdView.setOnLongClickListener(onProfileLongClickListener);
mProfileThirdView.disableTouchFeedback(false);
} else {
mProfileThirdView.disableTouchFeedback(true);
}
mProfileThirdView.setVisibility(View.VISIBLE);
mProfileThirdView.invalidate();
}
} else if (mProfiles != null && mProfiles.size() > 0) {
// no current profile selected but profiles exist: still show the text section
IProfile profile = mProfiles.get(0);
mAccountHeaderTextSection.setTag(R.id.material_drawer_profile_header, profile);
mAccountHeaderTextSection.setVisibility(View.VISIBLE);
handleSelectionView(mCurrentProfile, true);
mAccountSwitcherArrow.setVisibility(View.VISIBLE);
if (mCurrentProfile != null) {
StringHolder.applyTo(mCurrentProfile.getName(), mCurrentProfileName);
StringHolder.applyTo(mCurrentProfile.getEmail(), mCurrentProfileEmail);
}
}
if (!mSelectionFirstLineShown) {
mCurrentProfileName.setVisibility(View.GONE);
}
// fixed selection lines override any profile-derived values
if (!TextUtils.isEmpty(mSelectionFirstLine)) {
mCurrentProfileName.setText(mSelectionFirstLine);
mAccountHeaderTextSection.setVisibility(View.VISIBLE);
}
if (!mSelectionSecondLineShown) {
mCurrentProfileEmail.setVisibility(View.GONE);
}
if (!TextUtils.isEmpty(mSelectionSecondLine)) {
mCurrentProfileEmail.setText(mSelectionSecondLine);
mAccountHeaderTextSection.setVisibility(View.VISIBLE);
}
//if we disabled the list
if (!mSelectionListEnabled) {
mAccountSwitcherArrow.setVisibility(View.INVISIBLE);
handleSelectionView(null, false);
}
if (!mSelectionListEnabledForSingleProfile && mProfileFirst == null && (mProfiles == null || mProfiles.size() == 1)) {
mAccountSwitcherArrow.setVisibility(View.INVISIBLE);
handleSelectionView(null, false);
}
//if we disabled the list but still have set a custom listener, keep the selection clickable
if (mOnAccountHeaderSelectionViewClickListener != null) {
handleSelectionView(mCurrentProfile, true);
}
}
/**
 * Small helper that loads a profile image into the given view, showing the generic
 * placeholder until the real image (possibly a uri) is resolved.
 *
 * @param target the ImageView to fill
 * @param image  the profile image to load
 */
private void setImageOrPlaceholder(ImageView target, ImageHolder image) {
    // stop any in-flight load for this target first
    DrawerImageLoader.getInstance().cancelImage(target);
    // show the placeholder immediately
    target.setImageDrawable(DrawerUIUtils.getPlaceHolder(target.getContext()));
    // then start loading the real image (probably also the uri)
    ImageHolder.applyTo(image, target, DrawerImageLoader.Tags.PROFILE.name());
}
/**
 * onClick listener for the current (main) profile image; delegates to
 * onProfileImageClick with current == true.
 */
private View.OnClickListener onCurrentProfileClickListener = new View.OnClickListener() {
@Override
public void onClick(final View v) {
onProfileImageClick(v, true);
}
};
/**
 * onClick listener for the small secondary profile images; delegates to
 * onProfileImageClick with current == false.
 */
private View.OnClickListener onProfileClickListener = new View.OnClickListener() {
@Override
public void onClick(final View v) {
onProfileImageClick(v, false);
}
};
/**
 * Calls the mOnAccountHeaderProfileImageListener and, unless that listener consumed
 * the event, continues with the regular profile-click handling (which also performs
 * the profile switch).
 *
 * @param v       the clicked profile image view (carries the profile as tag)
 * @param current true when the main (current) profile image was clicked
 */
private void onProfileImageClick(View v, boolean current) {
    final IProfile profile = (IProfile) v.getTag(R.id.material_drawer_profile_header);
    final boolean consumed = mOnAccountHeaderProfileImageListener != null
            && mOnAccountHeaderProfileImageListener.onProfileImageClick(v, profile, current);
    // a consumed event also suppresses the profile change
    if (!consumed) {
        onProfileClick(v, current);
    }
}
/**
 * onLongClick listener for the current (main) profile image; forwards to the
 * profile-image listener (current == true). Returns false (not consumed) when no
 * listener is set.
 */
private View.OnLongClickListener onCurrentProfileLongClickListener = new View.OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
if (mOnAccountHeaderProfileImageListener != null) {
IProfile profile = (IProfile) v.getTag(R.id.material_drawer_profile_header);
return mOnAccountHeaderProfileImageListener.onProfileImageLongClick(v, profile, true);
}
return false;
}
};
/**
 * onLongClick listener for the small secondary profile images; forwards to the
 * profile-image listener (current == false). Returns false (not consumed) when no
 * listener is set.
 */
private View.OnLongClickListener onProfileLongClickListener = new View.OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
if (mOnAccountHeaderProfileImageListener != null) {
IProfile profile = (IProfile) v.getTag(R.id.material_drawer_profile_header);
return mOnAccountHeaderProfileImageListener.onProfileImageLongClick(v, profile, false);
}
return false;
}
};
// Handles a profile click: switches the active profile, resets the drawer content,
// notifies the MiniDrawer and the header listener and finally closes the drawer
// (delayed by mOnProfileClickDrawerCloseDelay ms when > 0, immediately otherwise).
protected void onProfileClick(View v, boolean current) {
final IProfile profile = (IProfile) v.getTag(R.id.material_drawer_profile_header);
switchProfiles(profile);
//reset the drawer content
resetDrawerContent(v.getContext());
//notify the MiniDrawer about the clicked profile (only if one exists and is hooked to the Drawer
if (mDrawer != null && mDrawer.getDrawerBuilder() != null && mDrawer.getDrawerBuilder().mMiniDrawer != null) {
mDrawer.getDrawerBuilder().mMiniDrawer.onProfileClick();
}
//notify about the changed profile; a consuming listener suppresses the drawer close
boolean consumed = false;
if (mOnAccountHeaderListener != null) {
consumed = mOnAccountHeaderListener.onProfileChanged(v, profile, current);
}
if (!consumed) {
if (mOnProfileClickDrawerCloseDelay > 0) {
// delayed close: small trick to remove lag when an activity is started from the click
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (mDrawer != null) {
mDrawer.closeDrawer();
}
}
}, mOnProfileClickDrawerCloseDelay);
} else {
if (mDrawer != null) {
mDrawer.closeDrawer();
}
}
}
}
/**
 * Returns the index of the current profile inside the profile list, using identity
 * comparison (the same object instance, not equals).
 *
 * @return the position of the current profile, or -1 if there is none
 */
protected int getCurrentSelection() {
    if (mCurrentProfile == null || mProfiles == null) {
        return -1;
    }
    for (int i = 0; i < mProfiles.size(); i++) {
        // identity compare on purpose: the same instance must be active
        if (mProfiles.get(i) == mCurrentProfile) {
            return i;
        }
    }
    return -1;
}
/**
 * onClick listener for the selection (text) area: notifies the custom selection-view
 * listener first; only toggles the selection list if the event was not consumed and
 * the switcher arrow is visible (i.e. the list is enabled).
 */
private View.OnClickListener onSelectionClickListener = new View.OnClickListener() {
@Override
public void onClick(View v) {
boolean consumed = false;
if (mOnAccountHeaderSelectionViewClickListener != null) {
consumed = mOnAccountHeaderSelectionViewClickListener.onClick(v, (IProfile) v.getTag(R.id.material_drawer_profile_header));
}
if (mAccountSwitcherArrow.getVisibility() == View.VISIBLE && !consumed) {
toggleSelectionList(v.getContext());
}
}
};
/**
 * helper method to toggle the profile selection list inside the drawer: when the list
 * is already shown the drawer content is reset, otherwise the selection list is built
 * and the switcher arrow flipped to point upwards. No-op without an attached drawer.
 *
 * @param ctx context used to build the arrow drawable and resolve colors
 */
protected void toggleSelectionList(Context ctx) {
if (mDrawer != null) {
//if we already show the list. reset everything instead
if (mDrawer.switchedDrawerContent()) {
resetDrawerContent(ctx);
mSelectionListShown = false;
} else {
//build and set the drawer selection list
buildDrawerSelectionList();
// update the arrow image within the drawer (now pointing up)
mAccountSwitcherArrow.setImageDrawable(new IconicsDrawable(ctx, MaterialDrawerFont.Icon.mdf_arrow_drop_up).sizeRes(R.dimen.material_drawer_account_header_dropdown).paddingRes(R.dimen.material_drawer_account_header_dropdown_padding).color(ColorHolder.color(mTextColor, ctx, R.attr.material_drawer_header_selection_text, R.color.material_drawer_header_selection_text)));
mSelectionListShown = true;
}
}
}
/**
 * helper method to build and set the drawer selection list from the profiles: each
 * profile that is also an IDrawerItem is added (deselected), the current profile's
 * position is remembered for preselection (or skipped entirely when
 * mCurrentHiddenInList is set), then the drawer content is switched to that list.
 * NOTE: expects mDrawer to be non-null (only called from toggleSelectionList).
 */
protected void buildDrawerSelectionList() {
int selectedPosition = -1;
int position = 0;
ArrayList<IDrawerItem> profileDrawerItems = new ArrayList<>();
if (mProfiles != null) {
for (IProfile profile : mProfiles) {
if (profile == mCurrentProfile) {
if (mCurrentHiddenInList) {
// skip the current profile entirely (also skips the position increment)
continue;
} else {
selectedPosition = position + mDrawer.getAdapter().getHeaderOffset();
}
}
if (profile instanceof IDrawerItem) {
((IDrawerItem) profile).withSetSelected(false);
profileDrawerItems.add((IDrawerItem) profile);
}
position = position + 1;
}
}
mDrawer.switchDrawerContent(onDrawerItemClickListener, onDrawerItemLongClickListener, profileDrawerItems, selectedPosition);
}
/**
 * Click listener for the switched-in profile selection list. On a click it
 * (1) switches to the clicked profile, (2) optionally resets the drawer back
 * to its normal content, (3) notifies a hooked MiniDrawer, (4) forwards the
 * change to the account-header listener, and (5) decides whether the drawer
 * should stay open.
 */
private Drawer.OnDrawerItemClickListener onDrawerItemClickListener = new Drawer.OnDrawerItemClickListener() {
@Override
public boolean onItemClick(final View view, int position, final IDrawerItem drawerItem) {
// switch only for selectable profile items; remember whether it was already the current one
final boolean isCurrentSelectedProfile;
if (drawerItem != null && drawerItem instanceof IProfile && drawerItem.isSelectable()) {
isCurrentSelectedProfile = switchProfiles((IProfile) drawerItem);
} else {
isCurrentSelectedProfile = false;
}
if (mResetDrawerOnProfileListClick) {
// NOTE(review): mDrawer is dereferenced here without the null check used two lines below — confirm it can never be null on this path
mDrawer.setOnDrawerItemClickListener(null);
}
//wrap the onSelection call and the reset stuff within a handler to prevent lag
if (mResetDrawerOnProfileListClick && mDrawer != null && view != null && view.getContext() != null) {
resetDrawerContent(view.getContext());
}
//notify the MiniDrawer about the clicked profile (only if one exists and is hooked to the Drawer
if (mDrawer != null && mDrawer.getDrawerBuilder() != null && mDrawer.getDrawerBuilder().mMiniDrawer != null) {
mDrawer.getDrawerBuilder().mMiniDrawer.onProfileClick();
}
// forward the profile change to the registered listener, if any
boolean consumed = false;
if (drawerItem != null && drawerItem instanceof IProfile) {
if (mOnAccountHeaderListener != null) {
consumed = mOnAccountHeaderListener.onProfileChanged(view, (IProfile) drawerItem, isCurrentSelectedProfile);
}
}
//if a custom behavior was chosen via the CloseDrawerOnProfileListClick then use this. else react on the result of the onProfileChanged listener
if (mCloseDrawerOnProfileListClick != null) {
return !mCloseDrawerOnProfileListClick;
} else {
return consumed;
}
}
};
/**
 * Long-click listener for the profile selection list. Forwards the event to
 * the registered account-header long-click listener when the item is a
 * profile; otherwise the event is not consumed.
 */
private Drawer.OnDrawerItemLongClickListener onDrawerItemLongClickListener = new Drawer.OnDrawerItemLongClickListener() {
    @Override
    public boolean onItemLongClick(View view, int position, IDrawerItem drawerItem) {
        // Nothing to do without a listener.
        if (mOnAccountHeaderItemLongClickListener == null) {
            return false;
        }
        // instanceof is null-safe, so this also rejects null items.
        if (!(drawerItem instanceof IProfile)) {
            return false;
        }
        final boolean currentlySelected = drawerItem.isSelected();
        return mOnAccountHeaderItemLongClickListener.onProfileLongClick(view, (IProfile) drawerItem, currentlySelected);
    }
};
/**
 * Restores the drawer's original content and flips the account-switcher
 * arrow back to its "drop down" state.
 *
 * @param ctx context used to build the arrow drawable
 */
private void resetDrawerContent(Context ctx) {
    if (mDrawer != null) {
        mDrawer.resetDrawerContent();
    }
    // Rebuild the downward arrow with the configured selection text color.
    IconicsDrawable arrowDown = new IconicsDrawable(ctx, MaterialDrawerFont.Icon.mdf_arrow_drop_down)
            .sizeRes(R.dimen.material_drawer_account_header_dropdown)
            .paddingRes(R.dimen.material_drawer_account_header_dropdown_padding)
            .color(ColorHolder.color(mTextColor, ctx, R.attr.material_drawer_header_selection_text, R.color.material_drawer_header_selection_text));
    mAccountSwitcherArrow.setImageDrawable(arrowDown);
}
/**
 * Recalculates the profile set and rebuilds the header views. If the profile
 * selection list is currently shown inside the drawer, that list is rebuilt
 * as well so profile changes appear immediately.
 */
protected void updateHeaderAndList() {
//recalculate the profiles
calculateProfiles();
//update the profiles in the header
buildProfiles();
//if we currently show the list add the new item directly to it
if (mSelectionListShown) {
buildDrawerSelectionList();
}
}
}
|
package opendap.wcs.v2_0;
import org.jdom.Document;
import org.jdom.Element;
import org.jdom.output.Format;
import org.jdom.output.XMLOutputter;
import java.io.IOException;
import java.io.OutputStream;
import java.util.*;
public class GetCoverageRequest {
private static final String _request = "GetCoverage";
private String _coverageID;
private String _format;
private String _mediaType;
private HashMap<String, DimensionSubset> _dimensionSubsets;
private TemporalDimensionSubset _temporalSubset;
private RangeSubset _rangeSubset;
private String _requestUrl;
/**
* Request parameter
* kvp:
* &RangeSubset=r& selects one range component.
* &RangeSubset=r1,r2,r4& selects three range components
* &RangeSubset=r1:r4,r7& selects 5 range components.
*/
private ScaleRequest _scaleRequest;
public GetCoverageRequest(){
_coverageID = null;
_format = null;
_mediaType = null;
_dimensionSubsets = new HashMap<>();
_requestUrl = null;
_scaleRequest = null;
_rangeSubset = null;
}
/**
* Creates a GetCoverageRequest from the KVP in the request URL's query string.
* @param requestUrl
* @param kvp
* @throws WcsException
* @throws InterruptedException
*/
public GetCoverageRequest(String requestUrl, Map<String,String[]> kvp)
throws WcsException, InterruptedException {
this();
String s[];
_requestUrl = requestUrl;
// Make sure the client is looking for a WCS service....
s = kvp.get("service");
WCS.checkService(s==null? null : s[0]);
// Make sure the client can accept a supported WCS version...
s = kvp.get("version");
WCS.checkVersion( s==null ? null : s[0]);
// Make sure the client is actually asking for this operation
s = kvp.get("request");
if(s == null){
throw new WcsException("Poorly formatted request URL. Missing " +
"key value pair for 'request'",
WcsException.MISSING_PARAMETER_VALUE,"request");
}
else if(!s[0].equalsIgnoreCase(_request)){
throw new WcsException("The servers internal dispatch operations " +
"have failed. The WCS request for the operation '"+s+"' " +
"has been incorrectly routed to the 'GetCapabilities' " +
"request processor.",
WcsException.NO_APPLICABLE_CODE);
}
// Get the identifier for the coverage.
s = kvp.get("coverageId".toLowerCase());
if(s==null){
throw new WcsException("Request is missing required " +
"Coverage 'coverageId'.",
WcsException.MISSING_PARAMETER_VALUE,
"coverageId");
}
_coverageID = Util.stripQuotes(s[0]);
CoverageDescription cvrgDscrpt = CatalogWrapper.getCoverageDescription(_coverageID);
if(cvrgDscrpt==null){
throw new WcsException("No such _coverageID: '"+ _coverageID +"'",
WcsException.INVALID_PARAMETER_VALUE,
"coverageId");
}
// Get the _format. It's not required (defaults to coverage's nativeFormat) and a null is used to indicate that
// it was not specified.
s = kvp.get("format");
_format = s==null? null : s[0];
// Get the _mediaType. It's not required and a null is used to indicate that
// it was not specified. If it is specified it's value MUST BE "multipart/related" and the
// the response MUST be a multipart MIME document with the gml:Coverage document in the first
// part and the second part must contain whatever response _format the user specified in the _format parameter.
s = kvp.get("mediaType".toLowerCase());
if(s!=null){
setMediaType(s[0]);
}
_scaleRequest = new ScaleRequest(kvp,cvrgDscrpt);
// Get the subset expressions
s = kvp.get("subset");
if(s!=null){
for(String subsetStr:s){
DimensionSubset subset = new DimensionSubset(subsetStr);
/**
* THis is the spot where we treat time the same as any other dimension
* (because that's the way the data is)
* While also handling it specially so the we can make the WCS dance.
*/
if(subset.getDimensionId().toLowerCase().contains("time")){
DomainCoordinate timeDomain = cvrgDscrpt.getDomainCoordinate("time");
_temporalSubset = new TemporalDimensionSubset(subset, timeDomain.getUnits());
subset = _temporalSubset;
}
_dimensionSubsets.put(subset.getDimensionId(), subset);
}
}
// Get the range subset expressions
s = kvp.get("RangeSubset".toLowerCase());
if(s!=null) {
for (String rangeSubsetString : s) {
if(!rangeSubsetString.isEmpty())
_rangeSubset = new RangeSubset(rangeSubsetString,cvrgDscrpt.getFields());
}
}
}
/**
* Creates a GetCoverageRequest from the XML submitted in a POST request.
* @param requestUrl
* @param getCoverageRequestElem
* @throws WcsException
* @throws InterruptedException
*/
public GetCoverageRequest(String requestUrl, Element getCoverageRequestElem)
throws WcsException, InterruptedException {
this();
Element e;
String s;
_requestUrl = requestUrl;
// Make sure we got the correct request object.
WCS.checkNamespace(getCoverageRequestElem,"GetCoverage",WCS.WCS_NS);
// Make sure the client is looking for a WCS service....
WCS.checkService(getCoverageRequestElem.getAttributeValue("service"));
// Make sure the client can accept a supported WCS version...
WCS.checkVersion(getCoverageRequestElem.getAttributeValue("version"));
// Get the identifier for the coverage.
e = getCoverageRequestElem.getChild("CoverageId",WCS.WCS_NS);
if(e==null ){
throw new WcsException("Missing required wcs:CoverageId element. ",
WcsException.MISSING_PARAMETER_VALUE,
"wcs:CoverageId");
}
_coverageID = Util.stripQuotes(e.getText());
// This call checks that there is a coverage matching the requested ID and it will
// throw a WcsException if no such coverage is available.
CoverageDescription cvrDsc = CatalogWrapper.getCoverageDescription(_coverageID);
ingestDimensionSubset(getCoverageRequestElem, cvrDsc);
// Get the _format for the coverage output.
Element formatElement = getCoverageRequestElem.getChild("format",WCS.WCS_NS);
if(formatElement!=null){
_format = formatElement.getTextTrim();
}
// Get the _mediaType. It's not required and a null is used to indicate that
// it was not specified. If it is specified it's value MUST BE "multipart/related" and the
// the response MUST be a multipart MIME document with the gml:Coverage document in the first
// part and the second part must contain whatever response _format the user specified in the _format parameter.
Element mediaTypeElement = getCoverageRequestElem.getChild("_mediaType",WCS.WCS_NS);
if(mediaTypeElement!=null){
s = mediaTypeElement.getTextTrim();
setMediaType(s);
}
}
public ScaleRequest getScaleRequest(){
return new ScaleRequest(_scaleRequest);
}
public RangeSubset getRangeSubset(){
return _rangeSubset;
}
public void setMediaType(String mType) throws WcsException {
if(mType!=null && !mType.equalsIgnoreCase("multipart/related")){
throw new WcsException("Optional _mediaType MUST be set to'multipart/related' " +
"No other value is allowed. OGC [09-110r4] section 8.4.1",
WcsException.INVALID_PARAMETER_VALUE,
"_mediaType");
}
_mediaType = mType;
}
public String getMediaType(){
return _mediaType;
}
public String getCoverageID() {
return _coverageID;
}
public String getFormat() {
return _format;
}
public void setFormat(String format) {
this._format = format;
}
public HashMap<String, DimensionSubset> getDimensionSubsets(){
HashMap<String, DimensionSubset> newDS = new HashMap<>();
for(DimensionSubset ds: _dimensionSubsets.values()){
if(ds instanceof TemporalDimensionSubset){
TemporalDimensionSubset ts = (TemporalDimensionSubset)ds;
newDS.put(ts.getDimensionId(),new TemporalDimensionSubset(ts));
}
else {
newDS.put(ds.getDimensionId(),new DimensionSubset(ds));
}
}
return newDS;
}
public void ingestDimensionSubset(Element getCoverageRequestElem, CoverageDescription cvrDsc) throws WcsException {
WCS.checkNamespace(getCoverageRequestElem,"GetCoverage", WCS.WCS_NS);
MultiElementFilter dimensionTypeFilter = new MultiElementFilter("DimensionTrim",WCS.WCS_NS);
dimensionTypeFilter.addTargetElement("DimensionSlice", WCS.WCS_NS);
Iterator dtei = getCoverageRequestElem.getDescendants(dimensionTypeFilter);
while(dtei.hasNext()){
Element dimensionType = (Element) dtei.next();
DimensionSubset ds = new DimensionSubset(dimensionType);
if(ds.getDimensionId().toLowerCase().contains("time")){
DomainCoordinate timeDomain = cvrDsc.getDomainCoordinate("time");
ds = new TemporalDimensionSubset(ds, timeDomain.getUnits());
}
_dimensionSubsets.put(ds.getDimensionId(), ds);
}
}
public Document getRequestDoc()throws WcsException{
return new Document(getRequestElement());
}
public void serialize(OutputStream os) throws IOException, WcsException {
XMLOutputter xmlo = new XMLOutputter(Format.getPrettyFormat());
xmlo.output(getRequestDoc(), os);
}
public String toString(){
XMLOutputter xmlo = new XMLOutputter(Format.getPrettyFormat());
try {
return xmlo.outputString(getRequestDoc());
} catch (WcsException e) {
WcsExceptionReport er = new WcsExceptionReport(e);
return er.toString();
}
}
public Element getRequestElement() throws WcsException{
Element requestElement;
String schemaLocation;
requestElement = new Element(_request, WCS.WCS_NS);
schemaLocation = WCS.WCS_NAMESPACE_STRING + " "+ WCS.WCS_SCHEMA_LOCATION_BASE+"wcsGetCoverage.xsd ";
//requestElement.addNamespaceDeclaration(WCS.OWS_NS);
//schemaLocation += WCS.OWS_NAMESPACE_STRING + " "+ WCS.OWS_SCHEMA_LOCATION_BASE+"owsAll.xsd ";
//requestElement.addNamespaceDeclaration(WCS.GML_NS);
//schemaLocation += WCS.GML_NAMESPACE_STRING + " "+ WCS.GML_SCHEMA_LOCATION_BASE+"gml.xsd ";
//requestElement.addNamespaceDeclaration(WCS.XSI_NS);
//requestElement.setAttribute("schemaLocation", schemaLocation,WCS.XSI_NS);
requestElement.setAttribute("service",WCS.SERVICE);
requestElement.setAttribute("version",WCS.CURRENT_VERSION);
Element e = new Element("CoverageId",WCS.WCS_NS);
e.setText(_coverageID);
requestElement.addContent(e);
for(DimensionSubset ds: _dimensionSubsets.values()){
requestElement.addContent(ds.getDimensionSubsetElement());
}
if(_format !=null){
Element formatElement = new Element("format",WCS.WCS_NS);
formatElement.setText(_format);
requestElement.addContent(formatElement);
}
if(_mediaType !=null){
Element mediaTypeElement = new Element("mediaType",WCS.WCS_NS);
mediaTypeElement.setText(_mediaType);
requestElement.addContent(mediaTypeElement);
}
return requestElement;
}
public String getRequestUrl(){
return _requestUrl;
}
public TemporalDimensionSubset getTemporalSubset(){
if(_temporalSubset!=null)
return new TemporalDimensionSubset(_temporalSubset);
return null;
}
}
|
package org.apache.xerces.parsers;
import java.io.IOException;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.StringTokenizer;
import org.apache.xerces.dom.TextImpl;
import org.apache.xerces.framework.XMLAttrList;
import org.apache.xerces.framework.XMLContentSpec;
import org.apache.xerces.framework.XMLDocumentHandler;
import org.apache.xerces.framework.XMLParser;
import org.apache.xerces.readers.XMLEntityHandler;
import org.apache.xerces.utils.QName;
import org.apache.xerces.utils.StringPool;
import org.apache.xerces.validators.common.XMLAttributeDecl;
import org.apache.xerces.validators.common.XMLElementDecl;
import org.apache.xerces.validators.schema.XUtil;
import org.apache.xerces.dom.DeferredDocumentImpl;
import org.apache.xerces.dom.DocumentImpl;
import org.apache.xerces.dom.DocumentTypeImpl;
import org.apache.xerces.dom.NodeImpl;
import org.apache.xerces.dom.EntityImpl;
import org.apache.xerces.dom.NotationImpl;
import org.apache.xerces.dom.ElementDefinitionImpl;
import org.apache.xerces.dom.AttrImpl;
import org.apache.xerces.dom.TextImpl;
import org.apache.xerces.dom.ElementImpl;
import org.apache.xerces.dom.EntityImpl;
import org.apache.xerces.dom.EntityReferenceImpl;
import org.w3c.dom.Attr;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Element;
import org.w3c.dom.Entity;
import org.w3c.dom.EntityReference;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
/**
* DOMParser provides a parser which produces a W3C DOM tree as its output
*
*
* @version $Id$
*/
public class DOMParser
extends XMLParser
implements XMLDocumentHandler
{
// Constants
// public
/** Default programmatic document class name (org.apache.xerces.dom.DocumentImpl). */
public static final String DEFAULT_DOCUMENT_CLASS_NAME = "org.apache.xerces.dom.DocumentImpl";
/** Default deferred document class name (org.apache.xerces.dom.DeferredDocumentImpl). */
public static final String DEFAULT_DEFERRED_DOCUMENT_CLASS_NAME = "org.apache.xerces.dom.DeferredDocumentImpl";
// debugging
/** Set to true to debug attribute list declaration calls. */
private static final boolean DEBUG_ATTLIST_DECL = false;
// features and properties
/** Features recognized by this parser. */
private static final String RECOGNIZED_FEATURES[] = {
// SAX2 core features
// Xerces features
"http://apache.org/xml/features/dom/defer-node-expansion",
"http://apache.org/xml/features/dom/create-entity-ref-nodes",
"http://apache.org/xml/features/dom/include-ignorable-whitespace",
// Experimental features
"http://apache.org/xml/features/domx/grammar-access",
};
/** Properties recognized by this parser. */
private static final String RECOGNIZED_PROPERTIES[] = {
// SAX2 core properties
// Xerces properties
"http://apache.org/xml/properties/dom/document-class-name",
"http://apache.org/xml/properties/dom/current-element-node",
};
// Data
// common data
protected Document fDocument;
// deferred expansion data
protected DeferredDocumentImpl fDeferredDocumentImpl;
protected int fDocumentIndex;
protected int fDocumentTypeIndex;
protected int fCurrentNodeIndex;
// full expansion data
protected DocumentImpl fDocumentImpl;
protected DocumentType fDocumentType;
protected Node fCurrentElementNode;
// state
protected boolean fInDTD;
protected boolean fWithinElement;
protected boolean fInCDATA;
// features
private boolean fGrammarAccess;
// properties
// REVISIT: Even though these have setters and getters, should they
// be protected visibility? -Ac
private String fDocumentClassName;
private boolean fDeferNodeExpansion;
private boolean fCreateEntityReferenceNodes;
private boolean fIncludeIgnorableWhitespace;
// built-in entities
protected int fAmpIndex;
protected int fLtIndex;
protected int fGtIndex;
protected int fAposIndex;
protected int fQuotIndex;
private boolean fSeenRootElement;
private XMLAttrList fAttrList;
// Constructors
/**
 * Default constructor. Installs this parser as its own document handler,
 * resets per-parse state, and applies the default settings: default document
 * class, entity-reference nodes, deferred node expansion, and ignorable
 * whitespace all enabled.
 */
public DOMParser() {
    initHandlers(false, this, this);

    // setup parser state
    init();

    // set default values
    try {
        setDocumentClassName(DEFAULT_DOCUMENT_CLASS_NAME);
        setCreateEntityReferenceNodes(true);
        setDeferNodeExpansion(true);
        setIncludeIgnorableWhitespace(true);
    } catch (SAXException e) {
        // Applying built-in defaults should never fail; if it does, chain the
        // cause instead of discarding it so the failure can be diagnosed.
        throw new RuntimeException("PAR001 Fatal error constructing DOMParser.", e);
    }
} // <init>()
// Public methods
// document
/**
 * Returns the document produced by the last parse (null if none). When the
 * full (non-deferred) DOM implementation was used, error checking is switched
 * back on before the document is handed to the caller — presumably it is
 * relaxed during the build for speed; confirm against the build path.
 */
public Document getDocument() {
if (fDocumentImpl != null) {
fDocumentImpl.setErrorChecking(true);
}
return fDocument;
}
// features and properties
/**
 * Returns the feature identifiers recognized by this parser: those of the
 * superclass followed by DOMParser's own. Never returns null; a zero-length
 * array means nothing is recognized.
 *
 * @see #isFeatureRecognized
 * @see #setFeature
 * @see #getFeature
 */
public String[] getFeaturesRecognized() {
    String[] fromSuper = super.getFeaturesRecognized();
    String[] fromThis = RECOGNIZED_FEATURES;

    // If either contribution is empty, the other can be returned as-is.
    if (fromThis.length == 0) {
        return fromSuper;
    }
    if (fromSuper.length == 0) {
        return fromThis;
    }

    // Concatenate: superclass features first, then ours.
    String[] combined = new String[fromSuper.length + fromThis.length];
    System.arraycopy(fromSuper, 0, combined, 0, fromSuper.length);
    System.arraycopy(fromThis, 0, combined, fromSuper.length, fromThis.length);
    return combined;
} // getFeaturesRecognized():String[]
/**
 * Returns the property identifiers recognized by this parser: those of the
 * superclass followed by DOMParser's own. Never returns null; a zero-length
 * array means nothing is recognized.
 *
 * @see #isPropertyRecognized
 * @see #setProperty
 * @see #getProperty
 */
public String[] getPropertiesRecognized() {
    String[] fromSuper = super.getPropertiesRecognized();
    String[] fromThis = RECOGNIZED_PROPERTIES;

    // If either contribution is empty, the other can be returned as-is.
    if (fromThis.length == 0) {
        return fromSuper;
    }
    if (fromSuper.length == 0) {
        return fromThis;
    }

    // Concatenate: superclass properties first, then ours.
    String[] combined = new String[fromSuper.length + fromThis.length];
    System.arraycopy(fromSuper, 0, combined, 0, fromSuper.length);
    System.arraycopy(fromThis, 0, combined, fromSuper.length, fromThis.length);
    return combined;
}
// resetting
/** Resets the parser for reuse: superclass state first, then this parser's per-document state via init(). */
public void reset() throws Exception {
super.reset();
init();
}
/** Resets or copies the parser (superclass decides which), then re-initializes this parser's per-document state via init(). */
public void resetOrCopy() throws Exception {
super.resetOrCopy();
init();
}
// Protected methods
// initialization
/**
 * Initializes the parser to a pre-parse state. This method is
 * called between calls to <code>parse()</code>: it clears the common,
 * deferred-expansion and full-expansion document state, resets the DTD/CDATA
 * flags, and re-registers the five XML built-in entity names in the string
 * pool (the pool may have been replaced by a reset).
 */
protected void init() {
// init common
fDocument = null;
// init deferred expansion
fDeferredDocumentImpl = null;
fDocumentIndex = -1;
fDocumentTypeIndex = -1;
fCurrentNodeIndex = -1;
// init full expansion
fDocumentImpl = null;
fDocumentType = null;
fCurrentElementNode = null;
// state
fInDTD = false;
fWithinElement = false;
fInCDATA = false;
// built-in entities
fAmpIndex = fStringPool.addSymbol("amp");
fLtIndex = fStringPool.addSymbol("lt");
fGtIndex = fStringPool.addSymbol("gt");
fAposIndex = fStringPool.addSymbol("apos");
fQuotIndex = fStringPool.addSymbol("quot");
fSeenRootElement = false;
fAttrList = new XMLAttrList(fStringPool);
} // init()
// features
/**
 * Controls whether node expansion in the default DOM implementation is
 * deferred until the tree is actually traversed.
 *
 * @param deferNodeExpansion true to defer node expansion
 *
 * @see #getDeferNodeExpansion
 * @see #setDocumentClassName
 */
protected void setDeferNodeExpansion(boolean deferNodeExpansion)
    throws SAXNotRecognizedException, SAXNotSupportedException {
    this.fDeferNodeExpansion = deferNodeExpansion;
}
/**
 * Reports whether node expansion in the default DOM implementation is
 * deferred.
 *
 * @return true when node expansion is deferred
 *
 * @see #setDeferNodeExpansion
 */
protected boolean getDeferNodeExpansion()
    throws SAXNotRecognizedException, SAXNotSupportedException {
    return this.fDeferNodeExpansion;
}
/**
 * Controls whether entity references within the document are represented in
 * the tree as EntityReference nodes.
 * <p>
 * Note: the children of the entity reference are always added to the
 * document; this setting only determines whether an EntityReference node is
 * also inserted as their parent.
 *
 * @param create true to create entity reference nodes; false to insert only
 *               the entity reference children
 *
 * @see #getCreateEntityReferenceNodes
 */
protected void setCreateEntityReferenceNodes(boolean create)
    throws SAXNotRecognizedException, SAXNotSupportedException {
    this.fCreateEntityReferenceNodes = create;
}
/**
 * Reports whether entity references within the document are represented in
 * the tree as EntityReference nodes.
 *
 * @return true when EntityReference nodes are created
 *
 * @see #setCreateEntityReferenceNodes
 */
public boolean getCreateEntityReferenceNodes()
    throws SAXNotRecognizedException, SAXNotSupportedException {
    return this.fCreateEntityReferenceNodes;
}
/**
 * Controls whether text nodes that qualify as "ignorable whitespace" are
 * included in the DOM tree.
 * <p>
 * Note: the parser can only classify text as ignorable when the associated
 * grammar supplies a content model for the document. When such nodes are
 * included they are flagged as ignorable, which can be queried via
 * TextImpl#isIgnorableWhitespace().
 *
 * @param include true to include ignorable whitespace text nodes; false to
 *                omit them
 *
 * @see #getIncludeIgnorableWhitespace
 */
public void setIncludeIgnorableWhitespace(boolean include)
    throws SAXNotRecognizedException, SAXNotSupportedException {
    this.fIncludeIgnorableWhitespace = include;
}
/**
 * Reports whether ignorable whitespace text nodes are included in the DOM
 * tree.
 *
 * @return true when ignorable whitespace text nodes are included
 *
 * @see #setIncludeIgnorableWhitespace
 */
public boolean getIncludeIgnorableWhitespace()
    throws SAXNotRecognizedException, SAXNotSupportedException {
    return this.fIncludeIgnorableWhitespace;
}
// properties
/**
 * Selects the document factory class used when constructing the DOM tree.
 * Using a class other than the default disables deferred node expansion.
 *
 * @param documentClassName fully qualified class name of the document
 *                          factory; null selects the default
 *
 * @throws IllegalArgumentException when the class cannot be found or does
 *         not implement org.w3c.dom.Document
 *
 * @see #getDocumentClassName
 * @see #setDeferNodeExpansion
 * @see #DEFAULT_DOCUMENT_CLASS_NAME
 */
protected void setDocumentClassName(String documentClassName)
    throws SAXNotRecognizedException, SAXNotSupportedException {

    // normalize class name
    if (documentClassName == null) {
        documentClassName = DEFAULT_DOCUMENT_CLASS_NAME;
    }

    // verify that this class exists and is of the right type
    try {
        Class _class = Class.forName(documentClassName);
        if (!Document.class.isAssignableFrom(_class)) {
            throw new IllegalArgumentException("PAR002 Class, \""+documentClassName+"\", is not of type org.w3c.dom.Document."+"\n"+documentClassName);
        }
    }
    catch (ClassNotFoundException e) {
        // Chain the lookup failure so it is not silently discarded.
        throw new IllegalArgumentException("PAR003 Class, \""+documentClassName+"\", not found."+"\n"+documentClassName, e);
    }

    // set document class name
    fDocumentClassName = documentClassName;

    // Deferred expansion is only implemented for the default document class.
    if (!documentClassName.equals(DEFAULT_DOCUMENT_CLASS_NAME)) {
        setDeferNodeExpansion(false);
    }
} // setDocumentClassName(String)
/**
 * Returns the fully qualified class name of the document factory used when
 * constructing the DOM tree.
 *
 * @return the document factory class name
 *
 * @see #setDocumentClassName
 */
protected String getDocumentClassName()
    throws SAXNotRecognizedException, SAXNotSupportedException {
    return this.fDocumentClassName;
}
/**
 * Returns the element node the parser is currently building, or null when
 * there is no current node or it is not an element.
 * <p>
 * Note: this method is not supported when the "deferNodeExpansion" property
 * is set to true and the document factory is the default factory.
 */
protected Element getCurrentElementNode()
    throws SAXNotRecognizedException, SAXNotSupportedException {
    Node current = fCurrentElementNode;
    if (current == null || current.getNodeType() != Node.ELEMENT_NODE) {
        return null;
    }
    return (Element) current;
} // getCurrentElementNode():Element
// Configurable methods
/**
 * Set the state of any feature in a SAX2 parser. The parser might not
 * recognize the feature, and if it does recognize it, it might not be able
 * to fulfill the request. Features DOMParser does not handle itself
 * (including all SAX2 core features) are delegated to XMLParser.
 *
 * @param featureId The unique identifier (URI) of the feature.
 * @param state The requested state of the feature (true or false).
 *
 * @exception SAXNotRecognizedException If the requested feature is
 *                                      not known.
 * @exception SAXNotSupportedException If the requested feature is
 *                                     known, but the requested state
 *                                     is not supported.
 */
public void setFeature(String featureId, boolean state)
    throws SAXNotRecognizedException, SAXNotSupportedException {

    if (featureId.startsWith(XERCES_FEATURES_PREFIX)) {
        String feature = featureId.substring(XERCES_FEATURES_PREFIX.length());

        // Lazily construct the tree returned by getDocument(): nodes are
        // built as the tree is traversed, so the document is available
        // sooner at the cost of buffering character data in memory.
        // Cannot be toggled while a parse is in progress.
        if (feature.equals("dom/defer-node-expansion")) {
            if (fParseInProgress) {
                throw new SAXNotSupportedException("PAR004 Cannot setFeature("+featureId + "): parse is in progress."+"\n"+featureId);
            }
            setDeferNodeExpansion(state);
            return;
        }

        // Whether entity references appear in the tree as EntityReference
        // nodes (the entity's children are added to the document either way).
        if (feature.equals("dom/create-entity-ref-nodes")) {
            setCreateEntityReferenceNodes(state);
            return;
        }

        // Whether "ignorable whitespace" text nodes are kept in the tree
        // (classification requires a grammar/content model; kept nodes are
        // flagged, see TextImpl#isIgnorableWhitespace()).
        if (feature.equals("dom/include-ignorable-whitespace")) {
            setIncludeIgnorableWhitespace(state);
            return;
        }

        // Experimental: expose the grammar in the DOM tree (an XML Schema
        // document tree as a child of the Doctype node).
        if (feature.equals("domx/grammar-access")) {
            fGrammarAccess = state;
            return;
        }
    }

    // SAX2 core features and anything unrecognized: defer to XMLParser.
    super.setFeature(featureId, state);
} // setFeature(String,boolean)
/**
 * Query the current state of any feature in a SAX2 parser. The parser might
 * not recognize the feature. Features DOMParser does not handle itself
 * (including all SAX2 core features) are delegated to XMLParser.
 *
 * @param featureId The unique identifier (URI) of the feature
 *                  being set.
 *
 * @return The current state of the feature.
 *
 * @exception SAXNotRecognizedException If the requested feature is
 *                                      not known.
 */
public boolean getFeature(String featureId)
    throws SAXNotRecognizedException, SAXNotSupportedException {

    if (featureId.startsWith(XERCES_FEATURES_PREFIX)) {
        String feature = featureId.substring(XERCES_FEATURES_PREFIX.length());

        // Lazy construction of the tree returned by getDocument().
        if (feature.equals("dom/defer-node-expansion")) {
            return getDeferNodeExpansion();
        }

        // Whether entity references appear as EntityReference nodes.
        if (feature.equals("dom/create-entity-ref-nodes")) {
            return getCreateEntityReferenceNodes();
        }

        // Whether ignorable whitespace text nodes are kept in the tree.
        if (feature.equals("dom/include-ignorable-whitespace")) {
            return getIncludeIgnorableWhitespace();
        }

        // Experimental: grammar access in the DOM tree.
        if (feature.equals("domx/grammar-access")) {
            return fGrammarAccess;
        }
    }

    // SAX2 core features and anything unrecognized: defer to XMLParser.
    return super.getFeature(featureId);
} // getFeature(String):boolean
/**
 * Set the value of any property in a SAX2 parser. The parser
 * might not recognize the property, and if it does recognize
 * it, it might not support the requested value.
 *
 * @param propertyId The unique identifier (URI) of the property
 *                   being set.
 * @param value The value to which the property is being set.
 *
 * @exception SAXNotRecognizedException If the requested property is
 *                                      not known.
 * @exception SAXNotSupportedException If the requested property is
 *                                     known, but the requested
 *                                     value is not supported.
 */
public void setProperty(String propertyId, Object value)
    throws SAXNotRecognizedException, SAXNotSupportedException {
    // Xerces properties
    if (propertyId.startsWith(XERCES_PROPERTIES_PREFIX)) {
        String property = propertyId.substring(XERCES_PROPERTIES_PREFIX.length());
        // The current element node is a read-only, query-time property
        // (see getProperty); attempting to set it always fails.
        if (property.equals("dom/current-element-node")) {
            throw new SAXNotSupportedException("PAR005 Property, \""+propertyId+"\" is read-only.\n"+propertyId);
        }
        // This property can be used to set/query the name of the
        // document factory; the value must be a String class name.
        else if (property.equals("dom/document-class-name")) {
            if (value != null && !(value instanceof String)) {
                throw new SAXNotSupportedException("PAR006 Property value must be of type java.lang.String.");
            }
            setDocumentClassName((String)value);
            return;
        }
        // unknown Xerces property: fall through to the superclass.
    }
    // Pass request off to XMLParser for the common cases.
    super.setProperty(propertyId, value);
} // setProperty(String,Object)
/**
 * Return the current value of a property in a SAX2 parser.
 * The parser might not recognize the property.
 *
 * @param propertyId The unique identifier (URI) of the property
 *                   being queried.
 *
 * @return The current value of the property.
 *
 * @exception SAXNotRecognizedException If the requested property is
 *                                      not known.
 * @exception SAXNotSupportedException If the property cannot be queried
 *                                     in the parser's current state.
 *
 * @see Configurable#getProperty
 */
public Object getProperty(String propertyId)
    throws SAXNotRecognizedException, SAXNotSupportedException {
    // Xerces properties
    if (propertyId.startsWith(XERCES_PROPERTIES_PREFIX)) {
        String property = propertyId.substring(XERCES_PROPERTIES_PREFIX.length());
        // Returns the current element node as the DOM Parser is
        // parsing. This property is useful for determining the
        // relative location of the document when an error is
        // encountered. Note that this property does *not* work when
        // the "dom/defer-node-expansion" feature is set to true (an
        // exception is thrown below in that case).
        if (property.equals("dom/current-element-node")) {
            boolean throwException = false;
            try {
                throwException = getFeature(XERCES_FEATURES_PREFIX+"dom/defer-node-expansion");
            }
            catch (SAXNotSupportedException e) {
                // ignore: treat an unqueryable feature as "not deferred"
            }
            catch (SAXNotRecognizedException e) {
                // ignore: treat an unknown feature as "not deferred"
            }
            if (throwException) {
                throw new SAXNotSupportedException("PAR007 Current element node cannot be queried when node expansion is deferred.");
            }
            return getCurrentElementNode();
        }
        // This property can be used to set/query the name of the
        // document factory.
        else if (property.equals("dom/document-class-name")) {
            return getDocumentClassName();
        }
    }
    // Pass request off to XMLParser for the common cases.
    return super.getProperty(propertyId);
} // getProperty(String):Object
// XMLParser methods
/**
 * Start document. Creates the document instance that the rest of the
 * callbacks will populate. Depending on the "dom/defer-node-expansion"
 * feature and the configured document class, this builds either a
 * deferred (lazily expanded) document or a fully expanded one.
 *
 * @throws RuntimeException if the document class name cannot be read,
 *         the expansion mode cannot be read, or the configured document
 *         class cannot be loaded.
 */
public void startDocument() {
    // deferred expansion
    String documentClassName = null;
    try {
        documentClassName = getDocumentClassName();
    } catch (SAXException e) {
        throw new RuntimeException("PAR008 Fatal error getting document factory.");
    }
    boolean deferNodeExpansion = true;
    try {
        deferNodeExpansion = getDeferNodeExpansion();
    } catch (SAXException e) {
        throw new RuntimeException("PAR009 Fatal error reading expansion mode.");
    }
    try {
        Class docClass = Class.forName(documentClassName);
        Class defaultDeferredDocClass = Class.forName(DEFAULT_DEFERRED_DOCUMENT_CLASS_NAME);
        if (deferNodeExpansion && docClass.isAssignableFrom(defaultDeferredDocClass)) {
            boolean nsEnabled = false;
            try { nsEnabled = getNamespaces(); }
            catch (SAXException s) {
                // best effort: default to namespaces disabled
            }
            fDocument = fDeferredDocumentImpl =
                new DeferredDocumentImpl(fStringPool, nsEnabled, fGrammarAccess);
            fDocumentIndex = fDeferredDocumentImpl.createDocument();
            fCurrentNodeIndex = fDocumentIndex;
        }
        // full expansion
        else {
            Class defaultDocClass = Class.forName(DEFAULT_DOCUMENT_CLASS_NAME);
            if (docClass.isAssignableFrom(defaultDocClass)) {
                fDocument = fDocumentImpl = new DocumentImpl(fGrammarAccess);
                fDocumentImpl.setErrorChecking(false);
            }
            else {
                try {
                    // Reuse the class already loaded above instead of a
                    // redundant second Class.forName(documentClassName).
                    fDocument = (Document)docClass.newInstance();
                }
                catch (Exception e) {
                    // REVISIT: We've already checked the type of the factory
                    //          in the setDocumentClassName() method. The only
                    //          exception that can occur here is if the class
                    //          doesn't have a zero-arg constructor. -Ac
                }
            }
            fCurrentElementNode = fDocument;
        }
    }
    catch (ClassNotFoundException e) {
        // REVISIT: Localize this message.
        throw new RuntimeException(documentClassName);
    }
} // startDocument()
/** End document. No-op: the tree is already complete by this point. */
public void endDocument() throws Exception {}
/**
 * XML declaration. The declaration is not represented in the DOM tree,
 * so the pooled strings are simply released back to the string pool.
 */
public void xmlDecl(int versionIndex, int encodingIndex, int standaloneIndex) throws Exception {
    // release strings
    fStringPool.releaseString(versionIndex);
    fStringPool.releaseString(encodingIndex);
    fStringPool.releaseString(standaloneIndex);
}
/**
 * Text declaration. Not represented in the DOM tree; the pooled
 * strings are simply released back to the string pool.
 */
public void textDecl(int versionIndex, int encodingIndex) throws Exception {
    // release strings
    fStringPool.releaseString(versionIndex);
    fStringPool.releaseString(encodingIndex);
}
/** Report the start of the scope of a namespace declaration. No-op. */
public void startNamespaceDeclScope(int prefix, int uri) throws Exception {}
/** Report the end of the scope of a namespace declaration. No-op. */
public void endNamespaceDeclScope(int prefix) throws Exception {}
/** Start element. */
public void startElement(QName elementQName,
XMLAttrList xmlAttrList, int attrListIndex)
throws Exception {
// deferred expansion
if (fDeferredDocumentImpl != null) {
int element =
fDeferredDocumentImpl.createElement(elementQName.rawname,
elementQName.uri,
xmlAttrList,
attrListIndex);
fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, element);
fCurrentNodeIndex = element;
fWithinElement = true;
// identifier registration
int index = xmlAttrList.getFirstAttr(attrListIndex);
while (index != -1) {
if (xmlAttrList.getAttType(index) == fStringPool.addSymbol("ID")) {
int nameIndex = xmlAttrList.getAttValue(index);
fDeferredDocumentImpl.putIdentifier(nameIndex, element);
}
index = xmlAttrList.getNextAttr(index);
}
// copy schema grammar, if needed
if (!fSeenRootElement) {
fSeenRootElement = true;
if (fDocumentTypeIndex == -1) {
fDocumentTypeIndex = fDeferredDocumentImpl.createDocumentType(elementQName.rawname, -1, -1);
fDeferredDocumentImpl.appendChild(0, fDocumentTypeIndex);
}
if (fGrammarAccess) {
// REVISIT: How do we know which grammar is in use?
//Document schemaDocument = fValidator.getSchemaDocument();
int size = fGrammarResolver.size();
if (size > 0) {
Enumeration schemas = fGrammarResolver.nameSpaceKeys();
Document schemaDocument = fGrammarResolver.getGrammar((String)schemas.nextElement()).getGrammarDocument();
if (schemaDocument != null) {
Element schema = schemaDocument.getDocumentElement();
copyInto(schema, fDocumentTypeIndex);
}
}
}
}
}
// full expansion
else {
boolean nsEnabled = false;
try { nsEnabled = getNamespaces(); }
catch (SAXException s) {}
String elementName = fStringPool.toString(elementQName.rawname);
Element e;
if (nsEnabled) {
e = fDocument.createElementNS(
// REVISIT: Make sure uri is filled in by caller.
fStringPool.toString(elementQName.uri),
fStringPool.toString(elementQName.localpart)
);
} else {
e = fDocument.createElement(elementName);
}
int attrHandle = xmlAttrList.getFirstAttr(attrListIndex);
while (attrHandle != -1) {
int attName = xmlAttrList.getAttrName(attrHandle);
String attrName = fStringPool.toString(attName);
String attrValue =
fStringPool.toString(xmlAttrList.getAttValue(attrHandle));
if (nsEnabled) {
int nsURIIndex = xmlAttrList.getAttrURI(attrHandle);
String namespaceURI = fStringPool.toString(nsURIIndex);
// DOM Level 2 wants all namespace declaration attributes
// So as long as the XML parser doesn't do it, it needs to
// done here.
int prefixIndex = xmlAttrList.getAttrPrefix(attrHandle);
String prefix = fStringPool.toString(prefixIndex);
if (namespaceURI == null) {
if (prefix != null) {
if (prefix.equals("xmlns")) {
namespaceURI = "http:
}
} else if (attrName.equals("xmlns")) {
namespaceURI = "http:
}
}
e.setAttributeNS(namespaceURI, attrName, attrValue);
} else {
e.setAttribute(attrName, attrValue);
}
if (!xmlAttrList.isSpecified(attrHandle)) {
((AttrImpl)e.getAttributeNode(attrName))
.setSpecified(false);
}
attrHandle = xmlAttrList.getNextAttr(attrHandle);
}
fCurrentElementNode.appendChild(e);
fCurrentElementNode = e;
fWithinElement = true;
// identifier registration
if (fDocumentImpl != null) {
int index = xmlAttrList.getFirstAttr(attrListIndex);
while (index != -1) {
if (xmlAttrList.getAttType(index) == fStringPool.addSymbol("ID")) {
String name = fStringPool.toString(xmlAttrList.getAttValue(index));
fDocumentImpl.putIdentifier(name, e);
}
index = xmlAttrList.getNextAttr(index);
}
}
xmlAttrList.releaseAttrList(attrListIndex);
// copy schema grammar, if needed
if (!fSeenRootElement) {
fSeenRootElement = true;
if (fDocumentImpl != null) {
if (fDocumentType == null) {
String rootName = elementName;
String systemId = ""; // REVISIT: How do we get this value? -Ac
String publicId = ""; // REVISIT: How do we get this value? -Ac
fDocumentType = fDocumentImpl.createDocumentType(rootName, publicId, systemId);
fDocument.appendChild(fDocumentType);
// REVISIT: We could use introspection to get the
// DOMImplementation#createDocumentType method
// for DOM Level 2 implementations. The only
// problem is that the owner document for the
// node created is null. How does it get set
// for document when appended? A cursory look
// at the DOM Level 2 CR didn't yield any
// information. -Ac
}
if (fGrammarAccess) {
if (fGrammarResolver.size() > 0) {
Enumeration schemas = fGrammarResolver.nameSpaceKeys();
Document schemaDocument = fGrammarResolver.getGrammar((String)schemas.nextElement()).getGrammarDocument();
if (schemaDocument != null) {
Element schema = schemaDocument.getDocumentElement();
XUtil.copyInto(schema, fDocumentType);
}
}
}
}
}
}
} // startElement(QName,XMLAttrList,int)
/** End element: move the current insertion point back to its parent. */
public void endElement(QName elementQName)
    throws Exception {
    // Either way we are leaving an element's content.
    fWithinElement = false;
    if (fDeferredDocumentImpl != null) {
        // deferred node expansion: walk up the parallel index structure
        fCurrentNodeIndex = fDeferredDocumentImpl.getParentNode(fCurrentNodeIndex, false);
    }
    else {
        // full node expansion: walk up the real DOM tree
        fCurrentElementNode = fCurrentElementNode.getParentNode();
    }
} // endElement(QName)
/**
 * Characters. Appends the pooled character data to the tree as a Text
 * node (or CDATASection when inside a CDATA section), merging with a
 * preceding Text sibling when possible in the full-expansion case.
 *
 * @param dataIndex string-pool handle of the character data
 */
public void characters(int dataIndex)
    throws Exception {
    // deferred node expansion
    if (fDeferredDocumentImpl != null) {
        int text;
        if (fInCDATA) {
            text = fDeferredDocumentImpl.createCDATASection(dataIndex, false);
        } else {
            // The Text normalization is taken care of within the Text Node
            // in the DEFERRED case.
            text = fDeferredDocumentImpl.createTextNode(dataIndex, false);
        }
        fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, text);
    }
    // full node expansion
    else {
        Text text;
        if (fInCDATA) {
            text = fDocument.createCDATASection(fStringPool.orphanString(dataIndex));
        }
        else {
            if (fWithinElement && fCurrentElementNode.getNodeType() == Node.ELEMENT_NODE) {
                Node lastChild = fCurrentElementNode.getLastChild();
                if (lastChild != null
                    && lastChild.getNodeType() == Node.TEXT_NODE) {
                    // Normalization of Text Nodes - append rather than create.
                    ((Text)lastChild).appendData(fStringPool.orphanString(dataIndex));
                    return;
                }
            }
            text = fDocument.createTextNode(fStringPool.orphanString(dataIndex));
        }
        fCurrentElementNode.appendChild(text);
    }
} // characters(int)
/**
 * Ignorable whitespace. Dropped entirely (the pooled string is orphaned
 * to release it) when the include-ignorable-whitespace feature is off;
 * otherwise appended like regular character data, with the resulting
 * Text node flagged as ignorable in the full-expansion case.
 *
 * @param dataIndex string-pool handle of the whitespace data
 */
public void ignorableWhitespace(int dataIndex) throws Exception {
    // ignore the whitespace
    if (!fIncludeIgnorableWhitespace) {
        fStringPool.orphanString(dataIndex);
        return;
    }
    // deferred node expansion
    if (fDeferredDocumentImpl != null) {
        int text;
        if (fInCDATA) {
            text = fDeferredDocumentImpl.createCDATASection(dataIndex, true);
        } else {
            // The Text normalization is taken care of within the Text Node
            // in the DEFERRED case.
            text = fDeferredDocumentImpl.createTextNode(dataIndex, true);
        }
        fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, text);
    }
    // full node expansion
    else {
        Text text;
        if (fInCDATA) {
            text = fDocument.createCDATASection(fStringPool.orphanString(dataIndex));
        }
        else {
            if (fWithinElement && fCurrentElementNode.getNodeType() == Node.ELEMENT_NODE) {
                Node lastChild = fCurrentElementNode.getLastChild();
                if (lastChild != null
                    && lastChild.getNodeType() == Node.TEXT_NODE) {
                    // Normalization of Text Nodes - append rather than create.
                    ((Text)lastChild).appendData(fStringPool.orphanString(dataIndex));
                    return;
                }
            }
            text = fDocument.createTextNode(fStringPool.orphanString(dataIndex));
        }
        if (fDocumentImpl != null) {
            // flag so callers can query TextImpl#isIgnorableWhitespace()
            ((TextImpl)text).setIgnorableWhitespace(true);
        }
        fCurrentElementNode.appendChild(text);
    }
} // ignorableWhitespace(int)
/** Processing instruction: append a PI node at the insertion point. */
public void processingInstruction(int targetIndex, int dataIndex)
    throws Exception {
    // deferred node expansion: create directly from string-pool handles
    if (fDeferredDocumentImpl != null) {
        int piIndex = fDeferredDocumentImpl.createProcessingInstruction(targetIndex, dataIndex);
        fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, piIndex);
        return;
    }
    // full node expansion: orphan the pooled strings (releasing their
    // pool entries) and build a real DOM node
    String piTarget = fStringPool.orphanString(targetIndex);
    String piData = fStringPool.orphanString(dataIndex);
    ProcessingInstruction piNode =
        fDocument.createProcessingInstruction(piTarget, piData);
    fCurrentElementNode.appendChild(piNode);
} // processingInstruction(int,int)
/** Comment: append a Comment node, or drop DTD comments when grammar access is off. */
public void comment(int dataIndex) throws Exception {
    // Comments inside the DTD are discarded unless grammar access is
    // enabled; the string is still orphaned to release the pool entry.
    if (fInDTD && !fGrammarAccess) {
        fStringPool.orphanString(dataIndex);
        return;
    }
    if (fDeferredDocumentImpl != null) {
        // deferred node expansion
        fDeferredDocumentImpl.appendChild(
            fCurrentNodeIndex, fDeferredDocumentImpl.createComment(dataIndex));
    }
    else {
        // full node expansion
        fCurrentElementNode.appendChild(
            fDocument.createComment(fStringPool.orphanString(dataIndex)));
    }
} // comment(int)
/** Not called. (Character data arrives through the int-index callbacks.) */
public void characters(char ch[], int start, int length) throws Exception {}
/** Not called. (Whitespace arrives through the int-index callbacks.) */
public void ignorableWhitespace(char ch[], int start, int length) throws Exception {}
// XMLDocumentScanner methods
/** Start CDATA section: subsequent character data becomes CDATASection nodes. */
public void startCDATA() throws Exception {
    fInCDATA = true;
}
/** End CDATA section: revert to creating plain Text nodes. */
public void endCDATA() throws Exception {
    fInCDATA = false;
}
// XMLEntityHandler methods
/**
 * Start entity reference. When EntityReference nodes are enabled, creates
 * one at the insertion point and descends into it so the entity's
 * replacement content becomes its children. Built-in entities and
 * non-content contexts are skipped.
 *
 * @param entityName    string-pool handle of the entity name
 * @param entityType    type of the entity (unused here)
 * @param entityContext where the reference occurred; only
 *                      ENTITYREF_IN_CONTENT is handled
 */
public void startEntityReference(int entityName, int entityType,
                                 int entityContext) throws Exception {
    // are we ignoring entity reference nodes?
    if (!fCreateEntityReferenceNodes) {
        return;
    }
    // ignore built-in entities (&amp; &gt; &lt; &apos; &quot;)
    if (entityName == fAmpIndex ||
        entityName == fGtIndex ||
        entityName == fLtIndex ||
        entityName == fAposIndex ||
        entityName == fQuotIndex) {
        return;
    }
    // we only support one context for entity references right now...
    if (entityContext != XMLEntityHandler.ENTITYREF_IN_CONTENT) {
        return;
    }
    // deferred node expansion
    if (fDeferredDocumentImpl != null) {
        int entityRefIndex = fDeferredDocumentImpl.createEntityReference(entityName);
        fDeferredDocumentImpl.appendChild(fCurrentNodeIndex, entityRefIndex);
        fCurrentNodeIndex = entityRefIndex;
    }
    // full node expansion
    else {
        EntityReference er =
            fDocument.createEntityReference(fStringPool.toString(entityName));
        fCurrentElementNode.appendChild(er);
        fCurrentElementNode = er;
        try {
            // unlock the node so children can be appended while parsing;
            // endEntityReference() re-locks it
            EntityReferenceImpl xer = (EntityReferenceImpl) er;
            xer.setReadOnly(false, false);
        } catch (Exception e) {
            // we aren't building against Xerces - do nothing
        }
    }
} // startEntityReference(int,int,int)
/**
 * End entity reference. Pops the insertion point back out of the
 * EntityReference node and, if the corresponding Entity declaration has
 * not been populated yet, copies/links the reference's children into it.
 * Mirrors the guards in startEntityReference.
 *
 * @param entityName    string-pool handle of the entity name
 * @param entityType    type of the entity (unused here)
 * @param entityContext where the reference occurred; only
 *                      ENTITYREF_IN_CONTENT is handled
 */
public void endEntityReference(int entityName, int entityType,
                               int entityContext) throws Exception {
    // are we ignoring entity reference nodes?
    if (!fCreateEntityReferenceNodes) {
        return;
    }
    // ignore built-in entities
    if (entityName == fAmpIndex ||
        entityName == fGtIndex ||
        entityName == fLtIndex ||
        entityName == fAposIndex ||
        entityName == fQuotIndex) {
        return;
    }
    // we only support one context for entity references right now...
    if (entityContext != XMLEntityHandler.ENTITYREF_IN_CONTENT) {
        return;
    }
    // deferred node expansion
    if (fDeferredDocumentImpl != null) {
        String name = fStringPool.toString(entityName);
        int erChild = fCurrentNodeIndex;
        fCurrentNodeIndex = fDeferredDocumentImpl.getParentNode(erChild, false);
        // should never be true - we should not return here.
        if (fDeferredDocumentImpl.getNodeType(erChild, false) != Node.ENTITY_REFERENCE_NODE) return;
        erChild = fDeferredDocumentImpl.getLastChild(erChild, false); // first Child of EntityReference
        if (fDocumentTypeIndex != -1) {
            // find Entity decl for this EntityReference.
            int entityDecl = fDeferredDocumentImpl.getLastChild(fDocumentTypeIndex, false);
            while (entityDecl != -1) {
                if (fDeferredDocumentImpl.getNodeType(entityDecl, false) == Node.ENTITY_NODE
                    && fDeferredDocumentImpl.getNodeNameString(entityDecl, false).equals(name)) // string compare...
                {
                    break;
                }
                entityDecl = fDeferredDocumentImpl.getPrevSibling(entityDecl, false);
            }
            if (entityDecl != -1
                && fDeferredDocumentImpl.getLastChild(entityDecl, false) == -1) {
                // found entityDecl with same name as this reference
                // AND it doesn't have any children.
                // we don't need to iterate, because the whole structure
                // should already be connected to the 1st child.
                fDeferredDocumentImpl.setAsLastChild(entityDecl, erChild);
            }
        }
    }
    // full node expansion
    else {
        Node erNode = fCurrentElementNode;//fCurrentElementNode.getParentNode();
        fCurrentElementNode = erNode.getParentNode();
        try {
            // re-lock the reference node opened in startEntityReference
            EntityReferenceImpl xer = (EntityReferenceImpl) erNode;
            xer.setReadOnly(false, false);
            // if necessary populate the related entity now
            if (fDocumentImpl != null) {
                NamedNodeMap entities = fDocumentType.getEntities();
                String name = fStringPool.toString(entityName);
                Node entityNode = entities.getNamedItem(name);
                // We could simply return here if there is no entity for
                // the reference or if the entity is already populated.
                if (entityNode == null || entityNode.hasChildNodes()) {
                    return;
                }
                EntityImpl entity = (EntityImpl) entityNode;
                entity.setReadOnly(false, false);
                for (Node child = erNode.getFirstChild();
                     child != null;
                     child = child.getNextSibling()) {
                    Node childClone = child.cloneNode(true);
                    entity.appendChild(childClone);
                }
                entity.setReadOnly(true, true);
            }
        } catch (Exception e) {
            // we aren't building against Xerces - do nothing
        }
    }
} // endEntityReference(int,int,int)
// DTDValidator.EventHandler methods
/**
* This function will be called when a <!DOCTYPE...> declaration is
* encountered.
*/
public void startDTD(QName rootElement, int publicId, int systemId)
throws Exception {
fInDTD = true;
// full expansion
if (fDocumentImpl != null) {
String rootElementName = fStringPool.toString(rootElement.rawname);
String publicString = fStringPool.toString(publicId);
String systemString = fStringPool.toString(systemId);
fDocumentType = fDocumentImpl.
createDocumentType(rootElementName, publicString, systemString);
fDocumentImpl.appendChild(fDocumentType);
if (fGrammarAccess) {
Element schema = fDocument.createElement("schema");
// REVISIT: What should the namespace be? -Ac
schema.setAttribute("xmlns", "http:
((AttrImpl)schema.getAttributeNode("xmlns")).setSpecified(false);
schema.setAttribute("finalDefault", "");
((AttrImpl)schema.getAttributeNode("finalDefault")).setSpecified(false);
schema.setAttribute("exactDefault", "");
((AttrImpl)schema.getAttributeNode("exactDefault")).setSpecified(false);
fDocumentType.appendChild(schema);
fCurrentElementNode = schema;
}
}
// deferred expansion
else if (fDeferredDocumentImpl != null) {
fDocumentTypeIndex =
fDeferredDocumentImpl.
createDocumentType(rootElement.rawname, publicId, systemId);
fDeferredDocumentImpl.appendChild(fDocumentIndex, fDocumentTypeIndex);
if (fGrammarAccess) {
int handle = fAttrList.startAttrList();
fAttrList.addAttr(
fStringPool.addSymbol("xmlns"),
fStringPool.addString("http:
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("finalDefault"),
fStringPool.addString(""),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.addAttr(
fStringPool.addSymbol("exactDefault"),
fStringPool.addString(""),
fStringPool.addSymbol("CDATA"),
false,
false); // search
fAttrList.endAttrList();
int schemaIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("schema"), fAttrList, handle);
// REVISIT: What should the namespace be? -Ac
fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, schemaIndex);
fCurrentNodeIndex = schemaIndex;
}
}
} // startDTD(int,int,int)
/**
 * Supports DOM Level 2 internalSubset additions.
 * Called when the internal subset is completely scanned; stores the
 * subset text on the doctype node of whichever document form is in use.
 *
 * @param internalSubset string-pool handle of the internal subset text
 */
public void internalSubset(int internalSubset) {
    //System.out.println("internalSubset callback:"+fStringPool.toString(internalSubset));
    // full expansion
    if (fDocumentImpl != null && fDocumentType != null) {
        ((DocumentTypeImpl)fDocumentType).setInternalSubset(fStringPool.toString(internalSubset));
    }
    // deferred expansion
    else if (fDeferredDocumentImpl != null) {
        fDeferredDocumentImpl.setInternalSubset(fDocumentTypeIndex, internalSubset);
    }
}
/**
 * Called at the end of the DTD. Clears the in-DTD flag and, when grammar
 * access parked the insertion point inside the synthesized schema
 * subtree, restores it to the document root.
 */
public void endDTD() throws Exception {
    fInDTD = false;
    if (!fGrammarAccess) {
        return;
    }
    if (fDocumentImpl != null) {
        fCurrentElementNode = fDocumentImpl;
    }
    else if (fDeferredDocumentImpl != null) {
        fCurrentNodeIndex = 0;
    }
} // endDTD()
/**
 * <!ELEMENT Name contentspec>
 *
 * When grammar access is enabled, mirrors the element declaration into
 * the synthesized schema subtree: ensures an "element" node exists for
 * the declared name, ensures a "complexType" child where applicable, and
 * renders the content model (EMPTY / ANY / mixed / children) as schema
 * markup. Runs against either the deferred-index document or the full
 * DOM, in parallel branches.
 *
 * @param elementDecl         qualified name of the declared element
 * @param contentSpecType     one of the XMLElementDecl.TYPE_* constants
 * @param contentSpecIndex    handle of the content-spec node list
 * @param contentSpecProvider accessor for the content-spec nodes
 */
public void elementDecl(QName elementDecl,
                        int contentSpecType,
                        int contentSpecIndex,
                        XMLContentSpec.Provider contentSpecProvider) throws Exception {
    if (DEBUG_ATTLIST_DECL) {
        String contentModel = XMLContentSpec.toString(contentSpecProvider, fStringPool, contentSpecIndex);
        System.out.println("elementDecl(" + fStringPool.toString(elementDecl.rawname) + ", " +
                           contentModel + ")");
    }
    // Create element declaration
    if (fGrammarAccess) {
        // deferred node expansion branch
        if (fDeferredDocumentImpl != null) {
            // Build element
            // get element declaration; create if necessary
            int schemaIndex = getLastChildElement(fDocumentTypeIndex, "schema");
            String elementName = fStringPool.toString(elementDecl.rawname);
            int elementIndex = getLastChildElement(schemaIndex, "element", "name", elementName);
            if (elementIndex == -1) {
                int handle = fAttrList.startAttrList();
                fAttrList.addAttr(
                    fStringPool.addSymbol("name"),
                    fStringPool.addString(elementName),
                    fStringPool.addSymbol("NMTOKEN"),
                    true,
                    false); // search
                fAttrList.addAttr(
                    fStringPool.addSymbol("minOccurs"), // name
                    fStringPool.addString("1"), // value
                    fStringPool.addSymbol("NMTOKEN"), // type
                    false, // specified
                    false); // search
                fAttrList.addAttr(
                    fStringPool.addSymbol("nullable"), // name
                    fStringPool.addString("false"), // value
                    fStringPool.addSymbol("ENUMERATION"), // type
                    false, // specified
                    false); // search
                fAttrList.addAttr(
                    fStringPool.addSymbol("abstract"), // name
                    fStringPool.addString("false"), // value
                    fStringPool.addSymbol("ENUMERATION"), // type
                    false, // specified
                    false); // search
                fAttrList.addAttr(
                    fStringPool.addSymbol("final"), // name
                    fStringPool.addString("false"), // value
                    fStringPool.addSymbol("ENUMERATION"), // type
                    false, // specified
                    false); // search
                fAttrList.endAttrList();
                elementIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
                fDeferredDocumentImpl.appendChild(schemaIndex, elementIndex);
            }
            // Build content model
            // get type element; create if necessary
            // (TYPE_MIXED defers creation until we know #PCDATA isn't alone)
            int typeIndex = getLastChildElement(elementIndex, "complexType");
            if (typeIndex == -1 && contentSpecType != XMLElementDecl.TYPE_MIXED) {
                typeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("complexType"), null, -1);
                // REVISIT: Check for type redeclaration? -Ac
                fDeferredDocumentImpl.insertBefore(elementIndex, typeIndex, getFirstChildElement(elementIndex));
            }
            // create models
            switch (contentSpecType) {
                case XMLElementDecl.TYPE_EMPTY: {
                    int attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("empty"), true);
                    fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
                    break;
                }
                case XMLElementDecl.TYPE_ANY: {
                    int anyIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("any"), null, -1);
                    fDeferredDocumentImpl.insertBefore(typeIndex, anyIndex, getFirstChildElement(typeIndex));
                    break;
                }
                case XMLElementDecl.TYPE_MIXED: {
                    XMLContentSpec contentSpec = new XMLContentSpec();
                    contentSpecProvider.getContentSpec(contentSpecIndex, contentSpec);
                    contentSpecIndex = contentSpec.value;
                    // bare (#PCDATA): element is just "string" typed
                    if (contentSpecIndex == -1) {
                        int attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("type"), fStringPool.addString("string"), true);
                        fDeferredDocumentImpl.setAttributeNode(elementIndex, attributeIndex);
                    }
                    else {
                        if (typeIndex == -1) {
                            typeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("complexType"), null, -1);
                            // REVISIT: Check for type redeclaration? -Ac
                            fDeferredDocumentImpl.insertBefore(elementIndex, typeIndex, getFirstChildElement(elementIndex));
                        }
                        int attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("mixed"), true);
                        fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
                        int handle = fAttrList.startAttrList();
                        fAttrList.addAttr(
                            fStringPool.addSymbol("minOccurs"),
                            fStringPool.addString("0"),
                            fStringPool.addSymbol("NMTOKEN"),
                            true,
                            false); // search
                        fAttrList.addAttr(
                            fStringPool.addSymbol("maxOccurs"),
                            fStringPool.addString("unbounded"),
                            fStringPool.addSymbol("CDATA"),
                            true,
                            false); // search
                        fAttrList.endAttrList();
                        int choiceIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("choice"), fAttrList, handle);
                        fDeferredDocumentImpl.appendChild(typeIndex, choiceIndex);
                        // walk the (#PCDATA|a|b|...) chain right-to-left,
                        // emitting an element ref for each right-hand leaf
                        while (contentSpecIndex != -1) {
                            // get node
                            contentSpecProvider.getContentSpec(contentSpecIndex, contentSpec);
                            int type = contentSpec.type;
                            int left = contentSpec.value;
                            int right = contentSpec.otherValue;
                            // if leaf, skip "#PCDATA" and stop
                            if (type == XMLContentSpec.CONTENTSPECNODE_LEAF) {
                                break;
                            }
                            // add right hand leaf
                            contentSpecProvider.getContentSpec(right, contentSpec);
                            handle = fAttrList.startAttrList();
                            fAttrList.addAttr(
                                fStringPool.addSymbol("ref"),
                                fStringPool.addString(fStringPool.toString(contentSpec.value)),
                                fStringPool.addSymbol("NMTOKEN"),
                                true,
                                false); //search
                            fAttrList.endAttrList();
                            int rightIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
                            int refIndex = getFirstChildElement(choiceIndex);
                            fDeferredDocumentImpl.insertBefore(choiceIndex, rightIndex, refIndex);
                            // go to next node
                            contentSpecIndex = left;
                        }
                    }
                    break;
                }
                case XMLElementDecl.TYPE_CHILDREN: {
                    int attributeIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("content"), fStringPool.addString("elementOnly"), true);
                    fDeferredDocumentImpl.setAttributeNode(typeIndex, attributeIndex);
                    int children = createChildren(contentSpecProvider,
                                                  contentSpecIndex,
                                                  new XMLContentSpec(),
                                                  fDeferredDocumentImpl,
                                                  -1);
                    fDeferredDocumentImpl.insertBefore(typeIndex, children, getFirstChildElement(typeIndex));
                    break;
                }
            }
        } // if defer-node-expansion
        // full DOM branch (mirror of the deferred branch above)
        else if (fDocumentImpl != null) {
            // Build element
            // get element declaration; create if necessary
            Element schema = XUtil.getLastChildElement(fDocumentType, "schema");
            String elementName = fStringPool.toString(elementDecl.rawname);
            Element element = XUtil.getLastChildElement(schema, "element", "name", elementName);
            if (element == null) {
                element = fDocumentImpl.createElement("element");
                element.setAttribute("name", elementName);
                element.setAttribute("minOccurs", "1");
                ((AttrImpl)element.getAttributeNode("minOccurs")).setSpecified(false);
                element.setAttribute("nullable", "false");
                ((AttrImpl)element.getAttributeNode("nullable")).setSpecified(false);
                element.setAttribute("abstract", "false");
                ((AttrImpl)element.getAttributeNode("abstract")).setSpecified(false);
                element.setAttribute("final", "false");
                ((AttrImpl)element.getAttributeNode("final")).setSpecified(false);
                schema.appendChild(element);
            }
            // Build content model
            // get type element; create if necessary
            Element type = XUtil.getLastChildElement(element, "complexType");
            if (type == null && contentSpecType != XMLElementDecl.TYPE_MIXED) {
                type = fDocumentImpl.createElement("complexType");
                // REVISIT: Check for type redeclaration? -Ac
                element.insertBefore(type, XUtil.getFirstChildElement(element));
            }
            // create models
            switch (contentSpecType) {
                case XMLElementDecl.TYPE_EMPTY: {
                    type.setAttribute("content", "empty");
                    break;
                }
                case XMLElementDecl.TYPE_ANY: {
                    Element any = fDocumentImpl.createElement("any");
                    type.insertBefore(any, XUtil.getFirstChildElement(type));
                    break;
                }
                case XMLElementDecl.TYPE_MIXED: {
                    XMLContentSpec contentSpec = new XMLContentSpec();
                    contentSpecProvider.getContentSpec(contentSpecIndex, contentSpec);
                    contentSpecIndex = contentSpec.value;
                    // bare (#PCDATA): element is just "string" typed
                    if (contentSpecIndex == -1) {
                        element.setAttribute("type", "string");
                    }
                    else {
                        if (type == null) {
                            type = fDocumentImpl.createElement("complexType");
                            // REVISIT: Check for type redeclaration? -Ac
                            element.insertBefore(type, XUtil.getFirstChildElement(element));
                        }
                        type.setAttribute("content", "mixed");
                        Element choice = fDocumentImpl.createElement("choice");
                        choice.setAttribute("minOccurs", "0");
                        choice.setAttribute("maxOccurs", "unbounded");
                        type.appendChild(choice);
                        // walk the (#PCDATA|a|b|...) chain right-to-left
                        while (contentSpecIndex != -1) {
                            // get node
                            contentSpecProvider.getContentSpec(contentSpecIndex, contentSpec);
                            int cstype = contentSpec.type;
                            int csleft = contentSpec.value;
                            int csright = contentSpec.otherValue;
                            // if leaf, skip "#PCDATA" and stop
                            if (cstype == XMLContentSpec.CONTENTSPECNODE_LEAF) {
                                break;
                            }
                            // add right hand leaf
                            contentSpecProvider.getContentSpec(csright, contentSpec);
                            Element right = fDocumentImpl.createElement("element");
                            right.setAttribute("ref", fStringPool.toString(contentSpec.value));
                            Element ref = XUtil.getFirstChildElement(choice);
                            choice.insertBefore(right, ref);
                            // go to next node
                            contentSpecIndex = csleft;
                        }
                    }
                    break;
                }
                case XMLElementDecl.TYPE_CHILDREN: {
                    type.setAttribute("content", "elementOnly");
                    Element children = createChildren(contentSpecProvider,
                                                      contentSpecIndex,
                                                      new XMLContentSpec(),
                                                      fDocumentImpl,
                                                      null);
                    type.insertBefore(children, XUtil.getFirstChildElement(type));
                    break;
                }
            }
        } // if NOT defer-node-expansion
    } // if grammar-access
} // elementDecl(int,String)
    /**
     * &lt;!ATTLIST Name AttDef&gt;
     * <p>
     * Handles one attribute declaration from an ATTLIST. Two independent
     * jobs are performed:
     * <ol>
     * <li>If the attribute has a default value, it is attached to the
     *     element definition node so that parsed elements of that name
     *     inherit the default.</li>
     * <li>If grammar access is enabled, the declaration is mirrored as
     *     schema-like markup (element/complexType/attribute nodes) under
     *     the doctype's &lt;schema&gt; element.</li>
     * </ol>
     * The first branch builds deferred node <em>indices</em>; the second
     * builds live DOM nodes. Both branches intentionally perform the same
     * logical work.
     *
     * @param attType         one of the XMLAttributeDecl.TYPE_* constants
     * @param attList         true for the list variants (ENTITIES, IDREFS, NMTOKENS)
     * @param enumString      enumeration literal, e.g. "(a|b|c)"; only read
     *                        when attType is TYPE_ENUMERATION
     * @param attDefaultType  one of the XMLAttributeDecl.DEFAULT_TYPE_* constants
     * @param attDefaultValue string-pool index of the default value, or -1 if none
     */
    public void attlistDecl(QName elementDecl, QName attributeDecl,
                            int attType, boolean attList, String enumString,
                            int attDefaultType, int attDefaultValue)
        throws Exception {
        if (DEBUG_ATTLIST_DECL) {
            System.out.println("attlistDecl(" + fStringPool.toString(elementDecl.rawname) + ", " +
                               fStringPool.toString(attributeDecl.rawname) + ", " +
                               fStringPool.toString(attType) + ", " +
                               enumString + ", " +
                               fStringPool.toString(attDefaultType) + ", " +
                               fStringPool.toString(attDefaultValue) + ")");
        }
        // deferred expansion
        if (fDeferredDocumentImpl != null) {
            // get the default value
            if (attDefaultValue != -1) {
                if (DEBUG_ATTLIST_DECL) {
                    System.out.println("  adding default attribute value: "+
                                       fStringPool.toString(attDefaultValue));
                }
                // get element definition
                int elementDefIndex = fDeferredDocumentImpl.lookupElementDefinition(elementDecl.rawname);
                // create element definition if not already there
                if (elementDefIndex == -1) {
                    elementDefIndex = fDeferredDocumentImpl.createElementDefinition(elementDecl.rawname);
                    fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, elementDefIndex);
                }
                // add default attribute (specified == false marks it as a default)
                int attrIndex =
                    fDeferredDocumentImpl.createAttribute(attributeDecl.rawname,
                                                          attributeDecl.uri,
                                                          attDefaultValue,
                                                          false);
                fDeferredDocumentImpl.appendChild(elementDefIndex, attrIndex);
            }
            // Create attribute declaration
            if (fGrammarAccess) {
                // get element declaration; create it if necessary
                int schemaIndex = getLastChildElement(fDocumentTypeIndex, "schema");
                String elementName = fStringPool.toString(elementDecl.rawname);
                int elementIndex = getLastChildElement(schemaIndex, "element", "name", elementName);
                if (elementIndex == -1) {
                    int handle = fAttrList.startAttrList();
                    fAttrList.addAttr(
                        fStringPool.addSymbol("name"),
                        fStringPool.addString(elementName),
                        fStringPool.addSymbol("NMTOKEN"),
                        true,
                        false); //search
                    fAttrList.endAttrList();
                    elementIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
                    fDeferredDocumentImpl.appendChild(schemaIndex, elementIndex);
                }
                // get type element; create it if necessary
                int typeIndex = getLastChildElement(elementIndex, "complexType");
                if (typeIndex == -1) {
                    typeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("complexType"), null, -1);
                    fDeferredDocumentImpl.insertBefore(elementIndex, typeIndex, getLastChildElement(elementIndex));
                }
                // create attribute and set its attributes
                // (skipped entirely when a declaration for this attribute
                //  name was already recorded -- first declaration wins)
                String attributeName = fStringPool.toString(attributeDecl.rawname);
                int attributeIndex = getLastChildElement(elementIndex, "attribute", "name", attributeName);
                if (attributeIndex == -1) {
                    int handle = fAttrList.startAttrList();
                    fAttrList.addAttr(
                        fStringPool.addSymbol("name"),
                        fStringPool.addString(attributeName),
                        fStringPool.addSymbol("NMTOKEN"),
                        true,
                        false); // search
                    fAttrList.addAttr(
                        fStringPool.addSymbol("maxOccurs"),
                        fStringPool.addString("1"),
                        fStringPool.addSymbol("CDATA"),
                        false,
                        false); // search
                    fAttrList.endAttrList();
                    attributeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("attribute"), fAttrList, handle);
                    fDeferredDocumentImpl.appendChild(typeIndex, attributeIndex);
                    // attribute type: CDATA, ENTITY, ... , NMTOKENS; ENUMERATION
                    if (attType == XMLAttributeDecl.TYPE_ENUMERATION) {
                        // an enumeration becomes a nested simpleType with one
                        // <enumeration> child per "|"-separated token
                        handle = fAttrList.startAttrList();
                        fAttrList.addAttr(
                            fStringPool.addSymbol("base"),
                            fStringPool.addString("NMTOKEN"),
                            fStringPool.addSymbol("NMTOKEN"),
                            true,
                            false); // search
                        fAttrList.endAttrList();
                        int simpleTypeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("simpleType"), fAttrList, handle);
                        fDeferredDocumentImpl.appendChild(attributeIndex, simpleTypeIndex);
                        // strip the surrounding parentheses from "(a|b|c)"
                        String tokenizerString = enumString.substring(1, enumString.length() - 1);
                        StringTokenizer tokenizer = new StringTokenizer(tokenizerString, "|");
                        while (tokenizer.hasMoreTokens()) {
                            handle = fAttrList.startAttrList();
                            fAttrList.addAttr(
                                fStringPool.addSymbol("value"),
                                fStringPool.addString(tokenizer.nextToken()),
                                fStringPool.addSymbol("CDATA"),
                                true,
                                false); // search
                            fAttrList.endAttrList();
                            int enumerationIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("enumeration"), fAttrList, handle);
                            fDeferredDocumentImpl.appendChild(simpleTypeIndex, enumerationIndex);
                        }
                    }
                    else {
                        // non-enumerated types map directly to a type name
                        int typeNameIndex = -1;
                        switch (attType) {
                            case XMLAttributeDecl.TYPE_ENTITY: {
                                typeNameIndex = fStringPool.addString(attList?"ENTITIES":"ENTITY");
                                break;
                            }
                            case XMLAttributeDecl.TYPE_ID: {
                                typeNameIndex = fStringPool.addString("ID");
                                break;
                            }
                            case XMLAttributeDecl.TYPE_IDREF: {
                                typeNameIndex = fStringPool.addString(attList?"IDREFS":"IDREF");
                                break;
                            }
                            case XMLAttributeDecl.TYPE_NMTOKEN: {
                                typeNameIndex = fStringPool.addString(attList?"NMTOKENS":"NMTOKEN");
                                break;
                            }
                            case XMLAttributeDecl.TYPE_NOTATION: {
                                typeNameIndex = fStringPool.addString("NOTATION");
                                break;
                            }
                            case XMLAttributeDecl.TYPE_CDATA:
                            default: {
                                typeNameIndex = fStringPool.addString("string");
                                break;
                            }
                        }
                        int attrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("type"), typeNameIndex, true);
                        fDeferredDocumentImpl.setAttributeNode(attributeIndex, attrIndex);
                    }
                    // attribute default type: #IMPLIED, #REQUIRED, #FIXED
                    boolean fixed = false;
                    switch (attDefaultType) {
                        case XMLAttributeDecl.DEFAULT_TYPE_REQUIRED: {
                            int useAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("use"), fStringPool.addString("required"), true);
                            fDeferredDocumentImpl.setAttributeNode(attributeIndex, useAttrIndex);
                            break;
                        }
                        case XMLAttributeDecl.DEFAULT_TYPE_FIXED: {
                            fixed = true;
                            int useAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("use"), fStringPool.addString("fixed"), true);
                            fDeferredDocumentImpl.setAttributeNode(attributeIndex, useAttrIndex);
                            break;
                        }
                    }
                    // attribute default value
                    // (use="fixed" already implies the value; otherwise mark
                    //  the declaration as use="default")
                    if (attDefaultValue != -1) {
                        if (!fixed) {
                            int useAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("use"), fStringPool.addString("default"), true);
                            fDeferredDocumentImpl.setAttributeNode(attributeIndex, useAttrIndex);
                        }
                        int valueAttrIndex = fDeferredDocumentImpl.createAttribute(fStringPool.addSymbol("value"), attDefaultValue, true);
                        fDeferredDocumentImpl.setAttributeNode(attributeIndex, valueAttrIndex);
                    }
                }
            }
        }
        // full expansion
        else if (fDocumentImpl != null) {
            // get the default value
            if (attDefaultValue != -1) {
                if (DEBUG_ATTLIST_DECL) {
                    System.out.println("  adding default attribute value: "+
                                       fStringPool.toString(attDefaultValue));
                }
                // get element name
                String elementName = fStringPool.toString(elementDecl.rawname);
                // get element definition node
                NamedNodeMap elements = ((DocumentTypeImpl)fDocumentType).getElements();
                ElementDefinitionImpl elementDef = (ElementDefinitionImpl)elements.getNamedItem(elementName);
                if (elementDef == null) {
                    elementDef = fDocumentImpl.createElementDefinition(elementName);
                    ((DocumentTypeImpl)fDocumentType).getElements().setNamedItem(elementDef);
                }
                // REVISIT: Check for uniqueness of element name? -Ac
                // REVISIT: what about default attributes with URI? -ALH
                // get attribute name and value index
                String attrName = fStringPool.toString(attributeDecl.rawname);
                String attrValue = fStringPool.toString(attDefaultValue);
                // create attribute and set properties
                // (specified == false marks it as a DTD default)
                AttrImpl attr = (AttrImpl)fDocumentImpl.createAttribute(attrName);
                attr.setValue(attrValue);
                attr.setSpecified(false);
                // add default attribute to element definition
                elementDef.getAttributes().setNamedItem(attr);
            }
            // Create attribute declaration
            try {
                if (fGrammarAccess) {
                    // get element declaration; create it if necessary
                    Element schema = XUtil.getLastChildElement(fDocumentType, "schema");
                    String elementName = fStringPool.toString(elementDecl.rawname);
                    Element element = XUtil.getLastChildElement(schema, "element", "name", elementName);
                    if (element == null) {
                        element = fDocumentImpl.createElement("element");
                        element.setAttribute("name", elementName);
                        schema.appendChild(element);
                    }
                    // get type element; create it if necessary
                    Element type = XUtil.getLastChildElement(element, "complexType");
                    if (type == null) {
                        type = fDocumentImpl.createElement("complexType");
                        element.insertBefore(type, XUtil.getLastChildElement(element));
                    }
                    // create attribute and set its attributes
                    // (first declaration of an attribute name wins, as above)
                    String attributeName = fStringPool.toString(attributeDecl.rawname);
                    Element attribute = XUtil.getLastChildElement(element, "attribute", "name", attributeName);
                    if (attribute == null) {
                        attribute = fDocumentImpl.createElement("attribute");
                        attribute.setAttribute("name", attributeName);
                        attribute.setAttribute("maxOccurs", "1");
                        ((AttrImpl)attribute.getAttributeNode("maxOccurs")).setSpecified(false);
                        type.appendChild(attribute);
                        // attribute type: CDATA, ENTITY, ... , NMTOKENS; ENUMERATION
                        if (attType == XMLAttributeDecl.TYPE_ENUMERATION) {
                            Element simpleType = fDocumentImpl.createElement("simpleType");
                            simpleType.setAttribute("base", "NMTOKEN");
                            attribute.appendChild(simpleType);
                            // strip the surrounding parentheses from "(a|b|c)"
                            String tokenizerString = enumString.substring(1, enumString.length() - 1);
                            StringTokenizer tokenizer = new StringTokenizer(tokenizerString, "|");
                            while (tokenizer.hasMoreTokens()) {
                                Element enumeration = fDocumentImpl.createElement("enumeration");
                                enumeration.setAttribute("value", tokenizer.nextToken());
                                simpleType.appendChild(enumeration);
                            }
                        }
                        else {
                            String typeName = null;
                            switch (attType) {
                                case XMLAttributeDecl.TYPE_ENTITY: {
                                    typeName = attList ? "ENTITIES" : "ENTITY";
                                    break;
                                }
                                case XMLAttributeDecl.TYPE_ID: {
                                    typeName = "ID";
                                    break;
                                }
                                case XMLAttributeDecl.TYPE_IDREF: {
                                    typeName = attList ? "IDREFS" : "IDREF";
                                    break;
                                }
                                case XMLAttributeDecl.TYPE_NMTOKEN: {
                                    typeName = attList ? "NMTOKENS" : "NMTOKEN";
                                    break;
                                }
                                case XMLAttributeDecl.TYPE_NOTATION: {
                                    typeName = "NOTATION";
                                    break;
                                }
                                case XMLAttributeDecl.TYPE_CDATA:
                                default: {
                                    typeName = "string";
                                    break;
                                }
                            }
                            attribute.setAttribute("type", typeName);
                        }
                        // attribute default type: #IMPLIED, #REQUIRED, #FIXED
                        boolean fixed = false;
                        switch (attDefaultType) {
                            case XMLAttributeDecl.DEFAULT_TYPE_REQUIRED: {
                                attribute.setAttribute("use", "required");
                                break;
                            }
                            case XMLAttributeDecl.DEFAULT_TYPE_FIXED: {
                                attribute.setAttribute("use", "fixed");
                                fixed = true;
                                break;
                            }
                        }
                        // attribute default value
                        if (attDefaultValue != -1) {
                            if (!fixed) {
                                attribute.setAttribute("use", "default");
                            }
                            attribute.setAttribute("value", fStringPool.toString(attDefaultValue));
                        }
                    }
                }
            }
            catch (Exception e) {
                // NOTE(review): failures while mirroring the grammar are only
                // logged so document building can continue -- confirm this
                // best-effort policy is intended.
                e.printStackTrace(System.err);
            }
        } // if NOT defer-node-expansion
    } // attlistDecl(int,int,int,String,int,int)
/**
* <!ENTITY % Name EntityValue> (internal)
*/
public void internalPEDecl(int entityNameIndex, int entityValueIndex) throws Exception {
if (fDeferredDocumentImpl != null) {
if (fGrammarAccess) {
StringBuffer str = new StringBuffer();
str.append("<!ENTITY % ");
str.append(fStringPool.toString(entityNameIndex));
str.append(" \"");
str.append(fStringPool.toString(entityValueIndex));
str.append("\">");
int commentIndex = fStringPool.addString(str.toString());
int internalPEEntityIndex = fDeferredDocumentImpl.createComment(commentIndex);
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
fDeferredDocumentImpl.appendChild(schemaIndex, internalPEEntityIndex);
}
}
else if (fDocumentImpl != null) {
if (fGrammarAccess) {
StringBuffer str = new StringBuffer();
str.append("<!ENTITY % ");
str.append(fStringPool.toString(entityNameIndex));
str.append(" \"");
str.append(fStringPool.orphanString(entityValueIndex));
str.append("\">");
Node internalPEEntity = fDocumentImpl.createComment(str.toString());
Node schema = XUtil.getFirstChildElement(fDocumentType, "schema");
schema.appendChild(internalPEEntity);
}
}
else {
fStringPool.orphanString(entityValueIndex);
}
}
/**
* <!ENTITY % Name ExternalID> (external)
*/
public void externalPEDecl(int entityNameIndex, int publicIdIndex, int systemIdIndex) throws Exception {
if (fDeferredDocumentImpl != null) {
if (fGrammarAccess) {
StringBuffer str = new StringBuffer();
str.append("<!ENTITY ");
str.append(fStringPool.toString(entityNameIndex));
str.append(' ');
if (publicIdIndex != -1) {
str.append("PUBLIC \"");
str.append(fStringPool.toString(publicIdIndex));
str.append('"');
if (systemIdIndex != -1) {
str.append(" \"");
str.append(fStringPool.toString(systemIdIndex));
str.append('"');
}
}
else if (systemIdIndex != -1) {
str.append("SYSTEM \"");
str.append(fStringPool.toString(systemIdIndex));
str.append('"');
}
str.append('>');
int commentIndex = fStringPool.addString(str.toString());
int externalPEEntityIndex = fDeferredDocumentImpl.createComment(commentIndex);
int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
fDeferredDocumentImpl.appendChild(schemaIndex, externalPEEntityIndex);
}
}
else if (fDocumentImpl != null) {
if (fGrammarAccess) {
StringBuffer str = new StringBuffer();
str.append("<!ENTITY ");
str.append(fStringPool.toString(entityNameIndex));
str.append(' ');
if (publicIdIndex != -1) {
str.append("PUBLIC \"");
str.append(fStringPool.toString(publicIdIndex));
str.append('"');
if (systemIdIndex != -1) {
str.append(" \"");
str.append(fStringPool.toString(systemIdIndex));
str.append('"');
}
}
else if (systemIdIndex != -1) {
str.append("SYSTEM \"");
str.append(fStringPool.toString(systemIdIndex));
str.append('"');
}
str.append('>');
Node externalPEEntity = fDocumentImpl.createComment(str.toString());
Node schema = XUtil.getFirstChildElement(fDocumentType, "schema");
schema.appendChild(externalPEEntity);
}
}
}
    /**
     * &lt;!ENTITY Name EntityValue&gt; (internal)
     * <p>
     * Records an internal general entity declaration on the doctype node.
     * Internal entities carry no public/system id or notation, hence the
     * -1 arguments in the deferred branch. When grammar access is enabled,
     * the declaration text is additionally preserved as a comment under
     * the doctype's &lt;schema&gt; element.
     */
    public void internalEntityDecl(int entityNameIndex, int entityValueIndex)
        throws Exception {
        // deferred expansion
        if (fDeferredDocumentImpl != null) {
            if (fDocumentTypeIndex == -1) return; //revisit: should never happen. Exception?
            //revisit: how to check if entity was already declared.
            //         XML spec says that 1st Entity decl is binding.
            int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, -1, -1, -1);
            fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
            // REVISIT: Entities were removed from latest working draft. -Ac
            // create internal entity declaration
            if (fGrammarAccess) {
                StringBuffer str = new StringBuffer();
                str.append("<!ENTITY ");
                str.append(fStringPool.toString(entityNameIndex));
                str.append(" \"");
                str.append(fStringPool.toString(entityValueIndex));
                str.append("\">");
                int commentIndex = fStringPool.addString(str.toString());
                int textEntityIndex = fDeferredDocumentImpl.createComment(commentIndex);
                int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
                fDeferredDocumentImpl.appendChild(schemaIndex, textEntityIndex);
            }
        }
        // full expansion
        else if (fDocumentImpl != null) {
            if (fDocumentType == null) return; //revisit: should never happen. Exception?
            //revisit: how to check if entity was already declared.
            //         XML spec says that 1st Entity decl is binding.
            // NOTE(review): setNamedItem replaces an existing entry, so a
            // later declaration would win here, contrary to the note above --
            // confirm whether the entities map rejects duplicates upstream.
            String entityName = fStringPool.toString(entityNameIndex);
            Entity entity = fDocumentImpl.createEntity(entityName);
            fDocumentType.getEntities().setNamedItem(entity);
            // REVISIT: Entities were removed from latest working draft. -Ac
            // create internal entity declaration
            if (fGrammarAccess) {
                StringBuffer str = new StringBuffer();
                str.append("<!ENTITY ");
                str.append(fStringPool.toString(entityNameIndex));
                str.append(" \"");
                str.append(fStringPool.toString(entityValueIndex));
                str.append("\">");
                Node textEntity = fDocumentImpl.createComment(str.toString());
                Node schema = XUtil.getFirstChildElement(fDocumentType, "schema");
                schema.appendChild(textEntity);
            }
        }
    } // internalEntityDecl(int,int)
    /**
     * &lt;!ENTITY Name ExternalID&gt; (external)
     * <p>
     * Records an external general entity declaration on the doctype node,
     * and (with grammar access on) preserves the declaration text as a
     * comment under the doctype's &lt;schema&gt; element.
     *
     * @param publicIdIndex string-pool index of the public id, or -1
     * @param systemIdIndex string-pool index of the system id; assumed
     *                      present for a well-formed ExternalID
     */
    public void externalEntityDecl(int entityNameIndex, int publicIdIndex, int systemIdIndex)
        throws Exception {
        // deferred expansion
        if (fDeferredDocumentImpl != null) {
            //revisit: how to check if entity was already declared.
            //         XML spec says that 1st Entity decl is binding.
            int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, publicIdIndex, systemIdIndex, -1);
            fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
            // REVISIT: Entities were removed from latest working draft. -Ac
            // create external entity declaration
            if (fGrammarAccess) {
                StringBuffer str = new StringBuffer();
                str.append("<!ENTITY ");
                str.append(fStringPool.toString(entityNameIndex));
                str.append(' ');
                // ExternalID: PUBLIC "pub" "sys"  |  SYSTEM "sys"
                if (publicIdIndex != -1) {
                    str.append("PUBLIC \"");
                    str.append(fStringPool.toString(publicIdIndex));
                    str.append('"');
                    if (systemIdIndex != -1) {
                        str.append(" \"");
                        str.append(fStringPool.toString(systemIdIndex));
                        str.append('"');
                    }
                }
                else if (systemIdIndex != -1) {
                    str.append("SYSTEM \"");
                    str.append(fStringPool.toString(systemIdIndex));
                    str.append('"');
                }
                str.append('>');
                int commentIndex = fStringPool.addString(str.toString());
                int externalEntityIndex = fDeferredDocumentImpl.createComment(commentIndex);
                int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
                fDeferredDocumentImpl.appendChild(schemaIndex, externalEntityIndex);
            }
        }
        // full expansion
        else if (fDocumentImpl != null) {
            //revisit: how to check if entity was already declared.
            //         XML spec says that 1st Entity decl is binding.
            String entityName = fStringPool.toString(entityNameIndex);
            String publicId = fStringPool.toString(publicIdIndex);
            String systemId = fStringPool.toString(systemIdIndex);
            EntityImpl entity = (EntityImpl)fDocumentImpl.createEntity(entityName);
            if (publicIdIndex != -1) {
                entity.setPublicId(publicId);
            }
            // NOTE(review): systemId is set unconditionally; assumes the
            // scanner always supplies a system id here -- TODO confirm.
            entity.setSystemId(systemId);
            fDocumentType.getEntities().setNamedItem(entity);
            // REVISIT: Entities were removed from latest working draft. -Ac
            // create external entity declaration
            if (fGrammarAccess) {
                StringBuffer str = new StringBuffer();
                str.append("<!ENTITY ");
                str.append(fStringPool.toString(entityNameIndex));
                str.append(' ');
                // ExternalID: PUBLIC "pub" "sys"  |  SYSTEM "sys"
                if (publicIdIndex != -1) {
                    str.append("PUBLIC \"");
                    str.append(fStringPool.toString(publicIdIndex));
                    str.append('"');
                    if (systemIdIndex != -1) {
                        str.append(" \"");
                        str.append(fStringPool.toString(systemIdIndex));
                        str.append('"');
                    }
                }
                else if (systemIdIndex != -1) {
                    str.append("SYSTEM \"");
                    str.append(fStringPool.toString(systemIdIndex));
                    str.append('"');
                }
                str.append('>');
                Node externalEntity = fDocumentImpl.createComment(str.toString());
                Node schema = XUtil.getFirstChildElement(fDocumentType, "schema");
                schema.appendChild(externalEntity);
            }
        }
    } // externalEntityDecl(int,int,int)
    /**
     * &lt;!ENTITY Name ExternalID NDataDecl&gt; (unparsed)
     * <p>
     * Records an unparsed (NDATA) entity declaration on the doctype node,
     * and (with grammar access on) preserves the declaration text as a
     * comment under the doctype's &lt;schema&gt; element. Identical to
     * {@link #externalEntityDecl} except that the notation name is also
     * stored and rendered after "NDATA".
     *
     * @param publicIdIndex     string-pool index of the public id, or -1
     * @param systemIdIndex     string-pool index of the system id
     * @param notationNameIndex string-pool index of the notation name
     */
    public void unparsedEntityDecl(int entityNameIndex,
                                   int publicIdIndex, int systemIdIndex,
                                   int notationNameIndex) throws Exception {
        // deferred expansion
        if (fDeferredDocumentImpl != null) {
            //revisit: how to check if entity was already declared.
            //         XML spec says that 1st Entity decl is binding.
            int newEntityIndex = fDeferredDocumentImpl.createEntity(entityNameIndex, publicIdIndex, systemIdIndex, notationNameIndex);
            fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newEntityIndex);
            // REVISIT: Entities were removed from latest working draft. -Ac
            // add unparsed entity declaration
            if (fGrammarAccess) {
                StringBuffer str = new StringBuffer();
                str.append("<!ENTITY ");
                str.append(fStringPool.toString(entityNameIndex));
                str.append(' ');
                // ExternalID: PUBLIC "pub" "sys"  |  SYSTEM "sys"
                if (publicIdIndex != -1) {
                    str.append("PUBLIC \"");
                    str.append(fStringPool.toString(publicIdIndex));
                    str.append('"');
                    if (systemIdIndex != -1) {
                        str.append(" \"");
                        str.append(fStringPool.toString(systemIdIndex));
                        str.append('"');
                    }
                }
                else if (systemIdIndex != -1) {
                    str.append("SYSTEM \"");
                    str.append(fStringPool.toString(systemIdIndex));
                    str.append('"');
                }
                str.append(" NDATA ");
                str.append(fStringPool.toString(notationNameIndex));
                str.append('>');
                int commentIndex = fStringPool.addString(str.toString());
                int unparsedEntityIndex = fDeferredDocumentImpl.createComment(commentIndex);
                int schemaIndex = getFirstChildElement(fDocumentTypeIndex, "schema");
                fDeferredDocumentImpl.appendChild(schemaIndex, unparsedEntityIndex);
            }
        }
        // full expansion
        else if (fDocumentImpl != null) {
            //revisit: how to check if entity was already declared.
            //         XML spec says that 1st Entity decl is binding.
            String entityName = fStringPool.toString(entityNameIndex);
            String publicId = fStringPool.toString(publicIdIndex);
            String systemId = fStringPool.toString(systemIdIndex);
            String notationName = fStringPool.toString(notationNameIndex);
            EntityImpl entity = (EntityImpl)fDocumentImpl.createEntity(entityName);
            if (publicIdIndex != -1) {
                entity.setPublicId(publicId);
            }
            // NOTE(review): systemId is set unconditionally; assumes the
            // scanner always supplies a system id here -- TODO confirm.
            entity.setSystemId(systemId);
            entity.setNotationName(notationName);
            fDocumentType.getEntities().setNamedItem(entity);
            // REVISIT: Entities were removed from latest working draft. -Ac
            // add unparsed entity declaration
            if (fGrammarAccess) {
                StringBuffer str = new StringBuffer();
                str.append("<!ENTITY ");
                str.append(fStringPool.toString(entityNameIndex));
                str.append(' ');
                // ExternalID: PUBLIC "pub" "sys"  |  SYSTEM "sys"
                if (publicIdIndex != -1) {
                    str.append("PUBLIC \"");
                    str.append(fStringPool.toString(publicIdIndex));
                    str.append('"');
                    if (systemIdIndex != -1) {
                        str.append(" \"");
                        str.append(fStringPool.toString(systemIdIndex));
                        str.append('"');
                    }
                }
                else if (systemIdIndex != -1) {
                    str.append("SYSTEM \"");
                    str.append(fStringPool.toString(systemIdIndex));
                    str.append('"');
                }
                str.append(" NDATA ");
                str.append(fStringPool.toString(notationNameIndex));
                str.append('>');
                Node unparsedEntity = fDocumentImpl.createComment(str.toString());
                Node schema = XUtil.getFirstChildElement(fDocumentType, "schema");
                schema.appendChild(unparsedEntity);
            }
        }
    } // unparsedEntityDecl(int,int,int,int)
    /**
     * &lt;!NOTATION Name ExternalId&gt;
     * <p>
     * Records a notation declaration on the doctype node, and (with
     * grammar access on) mirrors it as a &lt;notation&gt; element under
     * the doctype's &lt;schema&gt; element. Only the first declaration of
     * a given notation name creates a schema entry.
     *
     * @param publicIdIndex string-pool index of the public id, or -1
     * @param systemIdIndex string-pool index of the system id, or -1
     */
    public void notationDecl(int notationNameIndex, int publicIdIndex, int systemIdIndex)
        throws Exception {
        // deferred expansion
        if (fDeferredDocumentImpl != null) {
            //revisit: how to check if entity was already declared.
            //         XML spec says that 1st Entity decl is binding.
            int newNotationIndex = fDeferredDocumentImpl.createNotation(notationNameIndex, publicIdIndex, systemIdIndex);
            fDeferredDocumentImpl.appendChild(fDocumentTypeIndex, newNotationIndex);
            // create notation declaration
            if (fGrammarAccess) {
                int schemaIndex = getLastChildElement(fDocumentTypeIndex, "schema");
                String notationName = fStringPool.toString(notationNameIndex);
                int notationIndex = getLastChildElement(schemaIndex, "notation", "name", notationName);
                if (notationIndex == -1) {
                    int handle = fAttrList.startAttrList();
                    fAttrList.addAttr(
                        fStringPool.addSymbol("name"),
                        fStringPool.addString(notationName),
                        fStringPool.addSymbol("NMTOKEN"),
                        true,
                        false); // search
                    // public/system attributes are only written when present
                    if (publicIdIndex != -1) {
                        fAttrList.addAttr(
                            fStringPool.addSymbol("public"),
                            publicIdIndex,
                            fStringPool.addSymbol("CDATA"),
                            true,
                            false); // search
                    }
                    if (systemIdIndex != -1) {
                        fAttrList.addAttr(
                            fStringPool.addSymbol("system"),
                            systemIdIndex,
                            fStringPool.addSymbol("CDATA"),
                            true,
                            false); // search
                    }
                    fAttrList.endAttrList();
                    notationIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol("notation"), fAttrList, handle);
                    fDeferredDocumentImpl.appendChild(schemaIndex, notationIndex);
                }
            }
        }
        // full expansion
        else if (fDocumentImpl != null) {
            // REVISIT: how to check if entity was already declared.
            //          XML spec says that 1st Entity decl is binding.
            String notationName = fStringPool.toString(notationNameIndex);
            String publicId = fStringPool.toString(publicIdIndex);
            String systemId = fStringPool.toString(systemIdIndex);
            NotationImpl notationImpl = (NotationImpl)fDocumentImpl.createNotation(notationName);
            // NOTE(review): publicId is set unconditionally while systemId is
            // guarded below; for a SYSTEM-only notation this stores whatever
            // toString(-1) yields -- confirm the asymmetry is intended.
            notationImpl.setPublicId(publicId);
            if (systemIdIndex != -1) {
                notationImpl.setSystemId(systemId);
            }
            fDocumentType.getNotations().setNamedItem(notationImpl);
            // create notation declaration
            if (fGrammarAccess) {
                Element schema = XUtil.getFirstChildElement(fDocumentType, "schema");
                Element notation = XUtil.getFirstChildElement(schema, "notation", "name", notationName);
                if (notation == null) {
                    // NOTE(review): uses fDocument here while the rest of this
                    // branch uses fDocumentImpl -- presumably the same document;
                    // verify.
                    notation = fDocument.createElement("notation");
                    notation.setAttribute("name", notationName);
                    //notation.setAttribute("export", "true");
                    //((AttrImpl)notation.getAttributeNode("export")).setSpecified(false);
                    if (publicId != null) {
                        notation.setAttribute("public", publicId);
                    }
                    if (systemIdIndex != -1) {
                        notation.setAttribute("system", systemId);
                    }
                    schema.appendChild(notation);
                }
            }
        }
    } // notationDecl(int,int,int)
// Private methods
/** Returns the first child element of the specified node. */
private int getFirstChildElement(int nodeIndex) {
int childIndex = getLastChildElement(nodeIndex);
while (childIndex != -1) {
int prevIndex = getPrevSiblingElement(childIndex);
if (prevIndex == -1) {
break;
}
childIndex = prevIndex;
}
return childIndex;
}
/** Returns the first child element of the specified node. */
private int getFirstChildElement(int nodeIndex, String name) {
int childIndex = getLastChildElement(nodeIndex);
if (childIndex != -1) {
int nameIndex = fStringPool.addSymbol(name);
while (childIndex != -1) {
if (fDeferredDocumentImpl.getNodeName(childIndex, false) == nameIndex) {
break;
}
int prevIndex = getPrevSiblingElement(childIndex);
childIndex = prevIndex;
}
}
return childIndex;
}
/** Returns the last child element of the specified node. */
private int getLastChildElement(int nodeIndex) {
int childIndex = fDeferredDocumentImpl.getLastChild(nodeIndex, false);
while (childIndex != -1) {
if (fDeferredDocumentImpl.getNodeType(childIndex, false) == Node.ELEMENT_NODE) {
return childIndex;
}
childIndex = fDeferredDocumentImpl.getPrevSibling(childIndex, false);
}
return -1;
}
/** Returns the previous sibling element of the specified node. */
private int getPrevSiblingElement(int nodeIndex) {
int siblingIndex = fDeferredDocumentImpl.getPrevSibling(nodeIndex, false);
while (siblingIndex != -1) {
if (fDeferredDocumentImpl.getNodeType(siblingIndex, false) == Node.ELEMENT_NODE) {
return siblingIndex;
}
siblingIndex = fDeferredDocumentImpl.getPrevSibling(siblingIndex, false);
}
return -1;
}
/** Returns the first child element with the given name. */
private int getLastChildElement(int nodeIndex, String elementName) {
int childIndex = getLastChildElement(nodeIndex);
if (childIndex != -1) {
while (childIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(childIndex, false);
if (nodeName.equals(elementName)) {
return childIndex;
}
childIndex = getPrevSiblingElement(childIndex);
}
}
return -1;
}
/** Returns the next sibling element with the given name. */
private int getPrevSiblingElement(int nodeIndex, String elementName) {
int siblingIndex = getPrevSiblingElement(nodeIndex);
if (siblingIndex != -1) {
while (siblingIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(siblingIndex, false);
if (nodeName.equals(elementName)) {
return siblingIndex;
}
siblingIndex = getPrevSiblingElement(siblingIndex);
}
}
return -1;
}
/** Returns the first child element with the given name. */
private int getLastChildElement(int nodeIndex, String elemName, String attrName, String attrValue) {
int childIndex = getLastChildElement(nodeIndex, elemName);
if (childIndex != -1) {
while (childIndex != -1) {
int attrIndex = fDeferredDocumentImpl.getNodeValue(childIndex, false);
while (attrIndex != -1) {
String nodeName = fDeferredDocumentImpl.getNodeNameString(attrIndex, false);
if (nodeName.equals(attrName)) {
// REVISIT: Do we need to normalize the text? -Ac
int textIndex = fDeferredDocumentImpl.getLastChild(attrIndex, false);
String nodeValue = fDeferredDocumentImpl.getNodeValueString(textIndex, false);
if (nodeValue.equals(attrValue)) {
return childIndex;
}
}
attrIndex = fDeferredDocumentImpl.getPrevSibling(attrIndex, false);
}
childIndex = getPrevSiblingElement(childIndex, elemName);
}
}
return -1;
}
/** Returns the next sibling element with the given name and attribute. */
private int getPrevSiblingElement(int nodeIndex, String elemName, String attrName, String attrValue) {
int siblingIndex = getPrevSiblingElement(nodeIndex, elemName);
if (siblingIndex != -1) {
int attributeNameIndex = fStringPool.addSymbol(attrName);
while (siblingIndex != -1) {
int attrIndex = fDeferredDocumentImpl.getNodeValue(siblingIndex, false);
while (attrIndex != -1) {
int attrValueIndex = fDeferredDocumentImpl.getNodeValue(attrIndex, false);
if (attrValue.equals(fStringPool.toString(attrValueIndex))) {
return siblingIndex;
}
attrIndex = fDeferredDocumentImpl.getPrevSibling(attrIndex, false);
}
siblingIndex = getPrevSiblingElement(siblingIndex, elemName);
}
}
return -1;
}
    /**
     * Copies the source tree into the specified place in a destination
     * tree. The source node and its children are appended as children
     * of the destination node.
     * <p>
     * <em>Note:</em> This is an iterative implementation.
     * <p>
     * The walk is a pre-order traversal driven by three cursors:
     * {@code start} (the fixed root, used as the stop sentinel),
     * {@code parent} (the source-side parent of the current position)
     * and {@code place} (the node currently being copied). The deferred
     * destination is tracked in parallel via {@code destIndex}.
     */
    private void copyInto(Node src, int destIndex) throws Exception {
        // for ignorable whitespace features
        boolean domimpl = src != null && src instanceof DocumentImpl;
        // placement variables
        Node start  = src;
        Node parent = src;
        Node place  = src;
        // traverse source tree
        while (place != null) {
            // copy this node
            int nodeIndex = -1;
            short type = place.getNodeType();
            switch (type) {
                case Node.CDATA_SECTION_NODE: {
                    // ignorable-whitespace flag only available on our own impl
                    boolean ignorable = domimpl && ((TextImpl)place).isIgnorableWhitespace();
                    nodeIndex = fDeferredDocumentImpl.createCDATASection(fStringPool.addString(place.getNodeValue()), ignorable);
                    break;
                }
                case Node.COMMENT_NODE: {
                    nodeIndex = fDeferredDocumentImpl.createComment(fStringPool.addString(place.getNodeValue()));
                    break;
                }
                case Node.ELEMENT_NODE: {
                    // copy attributes (if any) into a pooled attribute list
                    XMLAttrList attrList = null;
                    int handle = -1;
                    NamedNodeMap attrs = place.getAttributes();
                    if (attrs != null) {
                        int length = attrs.getLength();
                        if (length > 0) {
                            handle = fAttrList.startAttrList();
                            for (int i = 0; i < length; i++) {
                                Attr attr = (Attr)attrs.item(i);
                                String attrName = attr.getNodeName();
                                String attrValue = attr.getNodeValue();
                                fAttrList.addAttr(
                                    fStringPool.addSymbol(attrName),
                                    fStringPool.addString(attrValue),
                                    fStringPool.addSymbol("CDATA"), // REVISIT
                                    attr.getSpecified(),
                                    false); // search
                            }
                            fAttrList.endAttrList();
                            attrList = fAttrList;
                        }
                    }
                    nodeIndex = fDeferredDocumentImpl.createElement(fStringPool.addSymbol(place.getNodeName()), attrList, handle);
                    break;
                }
                case Node.ENTITY_REFERENCE_NODE: {
                    nodeIndex = fDeferredDocumentImpl.createEntityReference(fStringPool.addSymbol(place.getNodeName()));
                    break;
                }
                case Node.PROCESSING_INSTRUCTION_NODE: {
                    nodeIndex = fDeferredDocumentImpl.createProcessingInstruction(fStringPool.addSymbol(place.getNodeName()), fStringPool.addString(place.getNodeValue()));
                    break;
                }
                case Node.TEXT_NODE: {
                    boolean ignorable = domimpl && ((TextImpl)place).isIgnorableWhitespace();
                    nodeIndex = fDeferredDocumentImpl.createTextNode(fStringPool.addString(place.getNodeValue()), ignorable);
                    break;
                }
                default: {
                    // document/doctype/attr nodes are not expected here
                    throw new IllegalArgumentException("PAR010 Can't copy node type, "+
                                                       type+" ("+
                                                       place.getNodeName()+')'
                                                       +"\n"+type+"\t"+place.getNodeName());
                }
            }
            fDeferredDocumentImpl.appendChild(destIndex, nodeIndex);
            // iterate over children
            if (place.hasChildNodes()) {
                parent = place;
                place = place.getFirstChild();
                destIndex = nodeIndex;  // descend on the destination side too
            }
            // advance
            else {
                place = place.getNextSibling();
                // pop back up until a next sibling exists or the root is reached;
                // destIndex retreats in lock-step with the source-side parent
                while (place == null && parent != start) {
                    place = parent.getNextSibling();
                    parent = parent.getParentNode();
                    destIndex = fDeferredDocumentImpl.getParentNode(destIndex, false);
                }
            }
        }
    } // copyInto(Node,int)
/**
* Sets the appropriate occurrence count attributes on the specified
* model element.
*/
private void setOccurrenceCount(Element model, int minOccur, int maxOccur) {
// min
model.setAttribute("minOccurs", Integer.toString(minOccur));
if (minOccur == 1) {
((AttrImpl)model.getAttributeNode("minOccurs")).setSpecified(false);
}
// max
if (maxOccur == -1) {
model.setAttribute("maxOccurs", "*");
}
else if (maxOccur != 1) {
model.setAttribute("maxOccurs", Integer.toString(maxOccur));
}
} // setOccurrenceCount(Element,int,int)
    /**
     * Creates the children for the element decl.
     * <p>
     * Recursively converts a content-spec subtree into schema-like DOM
     * markup. Occurrence operators are captured as the character codes
     * '+', '*' and '?' (stored in an int) before the wrapped node is
     * unwrapped. CHOICE/SEQ nodes are flattened: nested runs of the same
     * connector are merged into a single &lt;choice&gt;/&lt;sequence&gt;
     * element rather than deeply nested pairs.
     *
     * @param provider supplies content-spec nodes by index
     * @param index    index of the content-spec node to convert
     * @param node     scratch node, overwritten by each getContentSpec call
     * @param factory  document used to create the result elements
     * @param parent   enclosing model element being built, or null at the
     *                 top of a branch
     * @return the model element for this subtree, or null for an
     *         unrecognized node type
     */
    private Element createChildren(XMLContentSpec.Provider provider,
                                   int index, XMLContentSpec node,
                                   DocumentImpl factory,
                                   Element parent) throws Exception {
        // get occurrence count
        provider.getContentSpec(index, node);
        int occurs = -1;
        switch (node.type) {
            case XMLContentSpec.CONTENTSPECNODE_ONE_OR_MORE: {
                occurs = '+';
                provider.getContentSpec(node.value, node);  // unwrap operand
                break;
            }
            case XMLContentSpec.CONTENTSPECNODE_ZERO_OR_MORE: {
                occurs = '*';
                provider.getContentSpec(node.value, node);  // unwrap operand
                break;
            }
            case XMLContentSpec.CONTENTSPECNODE_ZERO_OR_ONE: {
                occurs = '?';
                provider.getContentSpec(node.value, node);  // unwrap operand
                break;
            }
        }
        // flatten model
        int nodeType = node.type;
        switch (nodeType) {
            // CHOICE or SEQUENCE
            case XMLContentSpec.CONTENTSPECNODE_CHOICE:
            case XMLContentSpec.CONTENTSPECNODE_SEQ: {
                // go down left side
                // (capture indices first: the recursive calls overwrite node)
                int leftIndex  = node.value;
                int rightIndex = node.otherValue;
                Element left  = createChildren(provider, leftIndex, node,
                                               factory, parent);
                // go down right side
                Element right = createChildren(provider, rightIndex, node,
                                               factory, null);
                // append left children
                boolean choice = nodeType == XMLContentSpec.CONTENTSPECNODE_CHOICE;
                String type = choice ? "choice" : "sequence";
                Element model = left;
                if (!left.getNodeName().equals(type)) {
                    // left subtree is not already the right connector kind:
                    // either start a fresh connector element or (when nested
                    // with default occurrences) merge into the parent
                    String minOccurs = left.getAttribute("minOccurs");
                    String maxOccurs = left.getAttribute("maxOccurs");
                    boolean min1 = minOccurs.length() == 0 || minOccurs.equals("1");
                    boolean max1 = maxOccurs.length() == 0 || maxOccurs.equals("1");
                    if (parent == null || (min1 && max1)) {
                        model = factory.createElement(type);
                        model.appendChild(left);
                    }
                    else {
                        model = parent;
                    }
                }
                // set occurrence count
                switch (occurs) {
                    case '+': {
                        model.setAttribute("maxOccurs", "unbounded");
                        break;
                    }
                    case '*': {
                        model.setAttribute("minOccurs", "0");
                        model.setAttribute("maxOccurs", "unbounded");
                        break;
                    }
                    case '?': {
                        model.setAttribute("minOccurs", "0");
                        break;
                    }
                }
                // append right children
                model.appendChild(right);
                // return model
                return model;
            }
            // LEAF
            case XMLContentSpec.CONTENTSPECNODE_LEAF: {
                // a leaf becomes an <element ref="..."/> with its occurrences
                Element leaf = factory.createElement("element");
                leaf.setAttribute("ref", fStringPool.toString(node.value));
                switch (occurs) {
                    case '+': {
                        leaf.setAttribute("maxOccurs", "unbounded");
                        break;
                    }
                    case '*': {
                        leaf.setAttribute("minOccurs", "0");
                        leaf.setAttribute("maxOccurs", "unbounded");
                        break;
                    }
                    case '?': {
                        leaf.setAttribute("minOccurs", "0");
                        break;
                    }
                }
                return leaf;
            }
        } // switch node type
        // error
        return null;
    } // createChildren(XMLContentSpec.Provider,int,XMLContentSpec,DocumentImpl,Element):Element
    /**
     * Creates the children for the deferred element decl.
     * <p>
     * Deferred-DOM counterpart of the method above: the same flattening of a
     * compiled content-spec tree, but operating on integer node handles and a
     * {@link DeferredDocumentImpl} factory instead of live DOM nodes.
     * A handle of -1 plays the role that null plays in the DOM version.
     *
     * @param provider supplies content-spec nodes by handle
     * @param index    handle of the content-spec node to expand
     * @param node     scratch node repeatedly refilled by the provider
     * @param factory  deferred document used to create the result nodes
     * @param parent   handle of the model node being built, or -1 at the root
     * @return the handle of the model node for this subtree, or -1 on error
     */
    private int createChildren(XMLContentSpec.Provider provider,
                               int index, XMLContentSpec node,
                               DeferredDocumentImpl factory,
                               int parent) throws Exception {
        // get occurrence count
        // 'occurs' holds the DTD occurrence character ('+', '*', '?'), or -1
        // for exactly-once; unary occurrence wrappers are unwrapped first.
        provider.getContentSpec(index, node);
        int occurs = -1;
        switch (node.type) {
            case XMLContentSpec.CONTENTSPECNODE_ONE_OR_MORE: {
                occurs = '+';
                provider.getContentSpec(node.value, node);
                break;
            }
            case XMLContentSpec.CONTENTSPECNODE_ZERO_OR_MORE: {
                occurs = '*';
                provider.getContentSpec(node.value, node);
                break;
            }
            case XMLContentSpec.CONTENTSPECNODE_ZERO_OR_ONE: {
                occurs = '?';
                provider.getContentSpec(node.value, node);
                break;
            }
        }
        // flatten model
        int nodeType = node.type;
        switch (nodeType) {
            // CHOICE or SEQUENCE
            case XMLContentSpec.CONTENTSPECNODE_CHOICE:
            case XMLContentSpec.CONTENTSPECNODE_SEQ: {
                // go down left side
                int leftIndex = node.value;
                int rightIndex = node.otherValue;
                int left = createChildren(provider, leftIndex, node,
                                          factory, parent);
                // go down right side (-1: right children are always appended
                // to 'model' below)
                int right = createChildren(provider, rightIndex, node,
                                           factory, -1);
                // append left children
                boolean choice = nodeType == XMLContentSpec.CONTENTSPECNODE_CHOICE;
                int type = fStringPool.addSymbol(choice ? "choice" : "sequence");
                int model = left;
                if (factory.getNodeName(left, false) != type) {
                    // -1 attribute handle means "not set", i.e. the default of 1
                    int minOccurs = factory.getAttribute(left, fStringPool.addSymbol("minOccurs"));
                    int maxOccurs = factory.getAttribute(left, fStringPool.addSymbol("maxOccurs"));
                    boolean min1 = minOccurs == -1 || fStringPool.toString(minOccurs).equals("1");
                    boolean max1 = maxOccurs == -1 || fStringPool.toString(maxOccurs).equals("1");
                    if (parent == -1 || (min1 && max1)) {
                        model = factory.createElement(type, null, -1);
                        factory.appendChild(model, left);
                    }
                    else {
                        model = parent;
                    }
                }
                // set occurrence count
                switch (occurs) {
                    case '+': {
                        int maxOccurs = factory.createAttribute(fStringPool.addSymbol("maxOccurs"),
                                                                fStringPool.addString("unbounded"),
                                                                true);
                        factory.setAttributeNode(model, maxOccurs);
                        break;
                    }
                    case '*': {
                        int minOccurs = factory.createAttribute(fStringPool.addSymbol("minOccurs"),
                                                                fStringPool.addString("0"),
                                                                true);
                        factory.setAttributeNode(model, minOccurs);
                        int maxOccurs = factory.createAttribute(fStringPool.addSymbol("maxOccurs"),
                                                                fStringPool.addString("unbounded"),
                                                                true);
                        factory.setAttributeNode(model, maxOccurs);
                        break;
                    }
                    case '?': {
                        int minOccurs = factory.createAttribute(fStringPool.addSymbol("minOccurs"),
                                                                fStringPool.addString("0"),
                                                                true);
                        factory.setAttributeNode(model, minOccurs);
                        break;
                    }
                }
                // append right children
                factory.appendChild(model, right);
                // return model
                return model;
            }
            // LEAF
            case XMLContentSpec.CONTENTSPECNODE_LEAF: {
                // NOTE(review): minOccurs is declared NMTOKEN here while
                // maxOccurs is CDATA — looks inconsistent; confirm intent.
                int handle = fAttrList.startAttrList();
                fAttrList.addAttr(
                    fStringPool.addSymbol("ref"),
                    fStringPool.addString(fStringPool.toString(node.value)),
                    fStringPool.addSymbol("NMTOKEN"),
                    true,
                    false); // search
                switch (occurs) {
                    case '+': {
                        fAttrList.addAttr(
                            fStringPool.addSymbol("maxOccurs"),
                            fStringPool.addString("unbounded"),
                            fStringPool.addSymbol("CDATA"),
                            true,
                            false); // search
                        break;
                    }
                    case '*': {
                        fAttrList.addAttr(
                            fStringPool.addSymbol("minOccurs"),
                            fStringPool.addString("0"),
                            fStringPool.addSymbol("NMTOKEN"),
                            true,
                            false); // search
                        fAttrList.addAttr(
                            fStringPool.addSymbol("maxOccurs"),
                            fStringPool.addString("unbounded"),
                            fStringPool.addSymbol("CDATA"),
                            true,
                            false); // search
                        break;
                    }
                    case '?': {
                        fAttrList.addAttr(
                            fStringPool.addSymbol("minOccurs"),
                            fStringPool.addString("0"),
                            fStringPool.addSymbol("NMTOKEN"),
                            true,
                            false); // search
                        break;
                    }
                }
                fAttrList.endAttrList();
                int leaf = factory.createElement(fStringPool.addSymbol("element"), fAttrList, handle);
                return leaf;
            }
        } // switch node type
        // error
        return -1;
    } // createChildren(XMLContentSpec.Provider,int,XMLContentSpec,DeferredDocumentImpl,int):int
} // class DOMParser
|
package org.bouncycastle.asn1.x509;
import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1EncodableVector;
import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.ASN1Set;
import org.bouncycastle.asn1.ASN1TaggedObject;
import org.bouncycastle.asn1.DEREncodable;
import org.bouncycastle.asn1.DERObject;
import org.bouncycastle.asn1.DERObjectIdentifier;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.DERSet;
import org.bouncycastle.asn1.DERString;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.util.Strings;
import org.bouncycastle.util.encoders.Hex;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
/**
* <pre>
* RDNSequence ::= SEQUENCE OF RelativeDistinguishedName
*
* RelativeDistinguishedName ::= SET SIZE (1..MAX) OF AttributeTypeAndValue
*
* AttributeTypeAndValue ::= SEQUENCE {
* type OBJECT IDENTIFIER,
* value ANY }
* </pre>
*/
public class X509Name
extends ASN1Encodable
{
    /**
     * country code - StringType(SIZE(2))
     */
    public static final DERObjectIdentifier C = new DERObjectIdentifier("2.5.4.6");
    /**
     * organization - StringType(SIZE(1..64))
     */
    public static final DERObjectIdentifier O = new DERObjectIdentifier("2.5.4.10");
    /**
     * organizational unit name - StringType(SIZE(1..64))
     */
    public static final DERObjectIdentifier OU = new DERObjectIdentifier("2.5.4.11");
    /**
     * Title
     */
    public static final DERObjectIdentifier T = new DERObjectIdentifier("2.5.4.12");
    /**
     * common name - StringType(SIZE(1..64))
     */
    public static final DERObjectIdentifier CN = new DERObjectIdentifier("2.5.4.3");
    /**
     * device serial number name - StringType(SIZE(1..64))
     */
    public static final DERObjectIdentifier SN = new DERObjectIdentifier("2.5.4.5");
    /**
     * street - StringType(SIZE(1..64))
     */
    public static final DERObjectIdentifier STREET = new DERObjectIdentifier("2.5.4.9");
    /**
     * device serial number name (alias for SN) - StringType(SIZE(1..64))
     */
    public static final DERObjectIdentifier SERIALNUMBER = SN;
    /**
     * locality name - StringType(SIZE(1..64))
     */
    public static final DERObjectIdentifier L = new DERObjectIdentifier("2.5.4.7");
    /**
     * state, or province name - StringType(SIZE(1..64))
     */
    public static final DERObjectIdentifier ST = new DERObjectIdentifier("2.5.4.8");
    /**
     * Naming attributes of type X520name
     */
    public static final DERObjectIdentifier SURNAME = new DERObjectIdentifier("2.5.4.4");
    public static final DERObjectIdentifier GIVENNAME = new DERObjectIdentifier("2.5.4.42");
    public static final DERObjectIdentifier INITIALS = new DERObjectIdentifier("2.5.4.43");
    public static final DERObjectIdentifier GENERATION = new DERObjectIdentifier("2.5.4.44");
    public static final DERObjectIdentifier UNIQUE_IDENTIFIER = new DERObjectIdentifier("2.5.4.45");
    /**
     * businessCategory - DirectoryString(SIZE(1..128))
     */
    public static final DERObjectIdentifier BUSINESS_CATEGORY = new DERObjectIdentifier(
                    "2.5.4.15");
    /**
     * postalCode - DirectoryString(SIZE(1..40))
     */
    public static final DERObjectIdentifier POSTAL_CODE = new DERObjectIdentifier(
                    "2.5.4.17");
    /**
     * dnQualifier - DirectoryString(SIZE(1..64))
     */
    public static final DERObjectIdentifier DN_QUALIFIER = new DERObjectIdentifier(
                    "2.5.4.46");
    /**
     * RFC 3039 Pseudonym - DirectoryString(SIZE(1..64))
     */
    public static final DERObjectIdentifier PSEUDONYM = new DERObjectIdentifier(
                    "2.5.4.65");
    /**
     * RFC 3039 DateOfBirth - GeneralizedTime - YYYYMMDD000000Z
     */
    public static final DERObjectIdentifier DATE_OF_BIRTH = new DERObjectIdentifier(
                    "1.3.6.1.5.5.7.9.1");
    /**
     * RFC 3039 PlaceOfBirth - DirectoryString(SIZE(1..128))
     */
    public static final DERObjectIdentifier PLACE_OF_BIRTH = new DERObjectIdentifier(
                    "1.3.6.1.5.5.7.9.2");
    /**
     * RFC 3039 Gender - PrintableString (SIZE(1)) -- "M", "F", "m" or "f"
     */
    public static final DERObjectIdentifier GENDER = new DERObjectIdentifier(
                    "1.3.6.1.5.5.7.9.3");
    /**
     * RFC 3039 CountryOfCitizenship - PrintableString (SIZE (2)) -- ISO 3166
     * codes only
     */
    public static final DERObjectIdentifier COUNTRY_OF_CITIZENSHIP = new DERObjectIdentifier(
                    "1.3.6.1.5.5.7.9.4");
    /**
     * RFC 3039 CountryOfResidence - PrintableString (SIZE (2)) -- ISO 3166
     * codes only
     */
    public static final DERObjectIdentifier COUNTRY_OF_RESIDENCE = new DERObjectIdentifier(
                    "1.3.6.1.5.5.7.9.5");
    /**
     * ISIS-MTT NameAtBirth - DirectoryString(SIZE(1..64))
     */
    public static final DERObjectIdentifier NAME_AT_BIRTH = new DERObjectIdentifier("1.3.36.8.3.14");
    /**
     * RFC 3039 PostalAddress - SEQUENCE SIZE (1..6) OF
     * DirectoryString(SIZE(1..30))
     */
    public static final DERObjectIdentifier POSTAL_ADDRESS = new DERObjectIdentifier(
                    "2.5.4.16");
    /**
     * Email address (RSA PKCS#9 extension) - IA5String.
     * <p>Note: if you're trying to be ultra orthodox, don't use this! It shouldn't be in here.
     */
    public static final DERObjectIdentifier EmailAddress = PKCSObjectIdentifiers.pkcs_9_at_emailAddress;
    /**
     * more from PKCS#9
     */
    public static final DERObjectIdentifier UnstructuredName = PKCSObjectIdentifiers.pkcs_9_at_unstructuredName;
    public static final DERObjectIdentifier UnstructuredAddress = PKCSObjectIdentifiers.pkcs_9_at_unstructuredAddress;
    /**
     * email address in Verisign certificates
     */
    public static final DERObjectIdentifier E = EmailAddress;
    /*
     * others...
     */
    public static final DERObjectIdentifier DC = new DERObjectIdentifier("0.9.2342.19200300.100.1.25");
    /**
     * LDAP User id.
     */
    public static final DERObjectIdentifier UID = new DERObjectIdentifier("0.9.2342.19200300.100.1.1");
    /**
     * determines whether or not strings should be processed and printed
     * from back to front.
     */
    public static boolean DefaultReverse = false;
    /**
     * default look up table translating OID values into their common symbols following
     * the convention in RFC 2253 with a few extras
     */
    public static final Hashtable DefaultSymbols = new Hashtable();
    /**
     * look up table translating OID values into their common symbols following the convention in RFC 2253
     *
     */
    public static final Hashtable RFC2253Symbols = new Hashtable();
    /**
     * look up table translating OID values into their common symbols following the convention in RFC 1779
     *
     */
    public static final Hashtable RFC1779Symbols = new Hashtable();
    /**
     * look up table translating common symbols into their OIDS.
     */
    public static final Hashtable DefaultLookUp = new Hashtable();
    /**
     * look up table translating OID values into their common symbols
     * @deprecated use DefaultSymbols
     */
    public static final Hashtable OIDLookUp = DefaultSymbols;
    /**
     * look up table translating string values into their OIDS -
     * @deprecated use DefaultLookUp
     */
    public static final Hashtable SymbolLookUp = DefaultLookUp;
    // cached boxed booleans used for the 'added' flags
    private static final Boolean TRUE = new Boolean(true); // for J2ME compatibility
    private static final Boolean FALSE = new Boolean(false);
    static
    {
        // symbols used by toString() with the default table
        DefaultSymbols.put(C, "C");
        DefaultSymbols.put(O, "O");
        DefaultSymbols.put(T, "T");
        DefaultSymbols.put(OU, "OU");
        DefaultSymbols.put(CN, "CN");
        DefaultSymbols.put(L, "L");
        DefaultSymbols.put(ST, "ST");
        DefaultSymbols.put(SN, "SERIALNUMBER");
        DefaultSymbols.put(EmailAddress, "E");
        DefaultSymbols.put(DC, "DC");
        DefaultSymbols.put(UID, "UID");
        DefaultSymbols.put(STREET, "STREET");
        DefaultSymbols.put(SURNAME, "SURNAME");
        DefaultSymbols.put(GIVENNAME, "GIVENNAME");
        DefaultSymbols.put(INITIALS, "INITIALS");
        DefaultSymbols.put(GENERATION, "GENERATION");
        DefaultSymbols.put(UnstructuredAddress, "unstructuredAddress");
        DefaultSymbols.put(UnstructuredName, "unstructuredName");
        DefaultSymbols.put(UNIQUE_IDENTIFIER, "UniqueIdentifier");
        DefaultSymbols.put(DN_QUALIFIER, "DN");
        DefaultSymbols.put(PSEUDONYM, "Pseudonym");
        DefaultSymbols.put(POSTAL_ADDRESS, "PostalAddress");
        DefaultSymbols.put(NAME_AT_BIRTH, "NameAtBirth");
        DefaultSymbols.put(COUNTRY_OF_CITIZENSHIP, "CountryOfCitizenship");
        DefaultSymbols.put(COUNTRY_OF_RESIDENCE, "CountryOfResidence");
        DefaultSymbols.put(GENDER, "Gender");
        DefaultSymbols.put(PLACE_OF_BIRTH, "PlaceOfBirth");
        DefaultSymbols.put(DATE_OF_BIRTH, "DateOfBirth");
        DefaultSymbols.put(POSTAL_CODE, "PostalCode");
        DefaultSymbols.put(BUSINESS_CATEGORY, "BusinessCategory");
        // RFC 2253 restricts itself to a smaller symbol set
        RFC2253Symbols.put(C, "C");
        RFC2253Symbols.put(O, "O");
        RFC2253Symbols.put(OU, "OU");
        RFC2253Symbols.put(CN, "CN");
        RFC2253Symbols.put(L, "L");
        RFC2253Symbols.put(ST, "ST");
        RFC2253Symbols.put(STREET, "STREET");
        RFC2253Symbols.put(DC, "DC");
        RFC2253Symbols.put(UID, "UID");
        // RFC 1779 is smaller still
        RFC1779Symbols.put(C, "C");
        RFC1779Symbols.put(O, "O");
        RFC1779Symbols.put(OU, "OU");
        RFC1779Symbols.put(CN, "CN");
        RFC1779Symbols.put(L, "L");
        RFC1779Symbols.put(ST, "ST");
        RFC1779Symbols.put(STREET, "STREET");
        // reverse mapping: lowercase attribute name -> OID, used when parsing
        // string DNs
        DefaultLookUp.put("c", C);
        DefaultLookUp.put("o", O);
        DefaultLookUp.put("t", T);
        DefaultLookUp.put("ou", OU);
        DefaultLookUp.put("cn", CN);
        DefaultLookUp.put("l", L);
        DefaultLookUp.put("st", ST);
        DefaultLookUp.put("sn", SN);
        DefaultLookUp.put("serialnumber", SN);
        DefaultLookUp.put("street", STREET);
        DefaultLookUp.put("emailaddress", E);
        DefaultLookUp.put("dc", DC);
        DefaultLookUp.put("e", E);
        DefaultLookUp.put("uid", UID);
        DefaultLookUp.put("surname", SURNAME);
        DefaultLookUp.put("givenname", GIVENNAME);
        DefaultLookUp.put("initials", INITIALS);
        DefaultLookUp.put("generation", GENERATION);
        DefaultLookUp.put("unstructuredaddress", UnstructuredAddress);
        DefaultLookUp.put("unstructuredname", UnstructuredName);
        DefaultLookUp.put("uniqueidentifier", UNIQUE_IDENTIFIER);
        DefaultLookUp.put("dn", DN_QUALIFIER);
        DefaultLookUp.put("pseudonym", PSEUDONYM);
        DefaultLookUp.put("postaladdress", POSTAL_ADDRESS);
        // NOTE(review): key is "nameofbirth" while the OID is NameAtBirth —
        // changing it would break existing callers; confirm before touching.
        DefaultLookUp.put("nameofbirth", NAME_AT_BIRTH);
        DefaultLookUp.put("countryofcitizenship", COUNTRY_OF_CITIZENSHIP);
        DefaultLookUp.put("countryofresidence", COUNTRY_OF_RESIDENCE);
        DefaultLookUp.put("gender", GENDER);
        DefaultLookUp.put("placeofbirth", PLACE_OF_BIRTH);
        DefaultLookUp.put("dateofbirth", DATE_OF_BIRTH);
        DefaultLookUp.put("postalcode", POSTAL_CODE);
        DefaultLookUp.put("businesscategory", BUSINESS_CATEGORY);
    }
    // converter used to turn string values into ASN.1 objects when encoding
    private X509NameEntryConverter converter = null;
    // attribute OIDs, in encoding order
    private Vector ordering = new Vector();
    // attribute values (Strings), parallel to 'ordering'
    private Vector values = new Vector();
    // per-entry flag: TRUE when the entry continues the preceding RDN
    // (multi-valued RDN joined with '+'), FALSE when it starts a new RDN
    private Vector added = new Vector();
    // cached ASN.1 encoding, built lazily by toASN1Object()
    private ASN1Sequence seq;
    /**
     * Return a X509Name based on the passed in tagged object.
     *
     * @param obj tag object holding name.
     * @param explicit true if explicitly tagged false otherwise.
     * @return the X509Name
     */
    public static X509Name getInstance(
        ASN1TaggedObject obj,
        boolean explicit)
    {
        // unwrap the tagged object to its underlying sequence and delegate
        return getInstance(ASN1Sequence.getInstance(obj, explicit));
    }
public static X509Name getInstance(
Object obj)
{
if (obj == null || obj instanceof X509Name)
{
return (X509Name)obj;
}
else if (obj instanceof ASN1Sequence)
{
return new X509Name((ASN1Sequence)obj);
}
throw new IllegalArgumentException("unknown object in factory \"" + obj.getClass().getName()+"\"");
}
    /**
     * Constructor from ASN1Sequence
     *
     * the principal will be a list of constructed sets, each containing an (OID, String) pair.
     */
    public X509Name(
        ASN1Sequence seq)
    {
        // keep the original encoding so toASN1Object() can return it verbatim
        this.seq = seq;
        Enumeration e = seq.getObjects();
        while (e.hasMoreElements())
        {
            // each element of the sequence is one RDN (a SET of AVAs)
            ASN1Set set = ASN1Set.getInstance(e.nextElement());
            for (int i = 0; i < set.size(); i++)
            {
                ASN1Sequence s = ASN1Sequence.getInstance(set.getObjectAt(i));
                if (s.size() != 2)
                {
                    throw new IllegalArgumentException("badly sized pair");
                }
                ordering.addElement(DERObjectIdentifier.getInstance(s.getObjectAt(0)));
                DEREncodable value = s.getObjectAt(1);
                if (value instanceof DERString)
                {
                    values.addElement(((DERString)value).getString());
                }
                else
                {
                    // non-string values are kept as '#' + hex of the DER encoding
                    values.addElement("#" + bytesToString(Hex.encode(value.getDERObject().getDEREncoded())));
                }
                // entries after the first in a SET belong to a multi-valued RDN
                added.addElement((i != 0) ? TRUE : FALSE); // to allow earlier JDK compatibility
            }
        }
    }
    /**
     * constructor from a table of attributes.
     * <p>
     * it is assumed the table contains OID/String pairs, and the contents
     * of the table are copied into an internal table as part of the
     * construction process.
     * <p>
     * <b>Note:</b> if the name you are trying to generate should be
     * following a specific ordering, you should use the constructor
     * with the ordering specified below.
     * @deprecated use an ordered constructor! The hashtable ordering is rarely correct
     */
    public X509Name(
        Hashtable attributes)
    {
        this(null, attributes);
    }
    /**
     * Constructor from a table of attributes with ordering.
     * <p>
     * it is assumed the table contains OID/String pairs, and the contents
     * of the table are copied into an internal table as part of the
     * construction process. The ordering vector should contain the OIDs
     * in the order they are meant to be encoded or printed in toString.
     */
    public X509Name(
        Vector ordering,
        Hashtable attributes)
    {
        this(ordering, attributes, new X509DefaultEntryConverter());
    }
    /**
     * Constructor from a table of attributes with ordering.
     * <p>
     * it is assumed the table contains OID/String pairs, and the contents
     * of the table are copied into an internal table as part of the
     * construction process. The ordering vector should contain the OIDs
     * in the order they are meant to be encoded or printed in toString.
     * <p>
     * The passed in converter will be used to convert the strings into their
     * ASN.1 counterparts.
     */
    public X509Name(
        Vector ordering,
        Hashtable attributes,
        X509NameEntryConverter converter)
    {
        this.converter = converter;
        if (ordering != null)
        {
            // caller-supplied order: use it verbatim
            for (int i = 0; i != ordering.size(); i++)
            {
                this.ordering.addElement(ordering.elementAt(i));
                this.added.addElement(FALSE); // single-valued RDNs only
            }
        }
        else
        {
            // no explicit order: fall back to the hashtable's iteration order
            Enumeration e = attributes.keys();
            while (e.hasMoreElements())
            {
                this.ordering.addElement(e.nextElement());
                this.added.addElement(FALSE);
            }
        }
        for (int i = 0; i != this.ordering.size(); i++)
        {
            DERObjectIdentifier oid = (DERObjectIdentifier)this.ordering.elementAt(i);
            // every oid in the ordering must have a value in the table
            if (attributes.get(oid) == null)
            {
                throw new IllegalArgumentException("No attribute for object id - " + oid.getId() + " - passed to distinguished name");
            }
            this.values.addElement(attributes.get(oid)); // copy the hash table
        }
    }
    /**
     * Takes two vectors one of the oids and the other of the values.
     * Values are converted using the default entry converter.
     */
    public X509Name(
        Vector oids,
        Vector values)
    {
        this(oids, values, new X509DefaultEntryConverter());
    }
/**
* Takes two vectors one of the oids and the other of the values.
* <p>
* The passed in converter will be used to convert the strings into their
* ASN.1 counterparts.
*/
public X509Name(
Vector oids,
Vector values,
X509NameEntryConverter converter)
{
this.converter = converter;
if (oids.size() != values.size())
{
throw new IllegalArgumentException("oids vector must be same length as values.");
}
for (int i = 0; i < oids.size(); i++)
{
this.ordering.addElement(oids.elementAt(i));
this.values.addElement(values.elementAt(i));
this.added.addElement(FALSE);
}
}
    /**
     * Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
     * some such, converting it into an ordered set of name attributes.
     * Uses the class defaults for direction and the lookup table.
     */
    public X509Name(
        String dirName)
    {
        this(DefaultReverse, DefaultLookUp, dirName);
    }
    /**
     * Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
     * some such, converting it into an ordered set of name attributes with each
     * string value being converted to its associated ASN.1 type using the passed
     * in converter.
     */
    public X509Name(
        String dirName,
        X509NameEntryConverter converter)
    {
        this(DefaultReverse, DefaultLookUp, dirName, converter);
    }
    /**
     * Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
     * some such, converting it into an ordered set of name attributes. If reverse
     * is true, create the encoded version of the sequence starting from the
     * last element in the string.
     */
    public X509Name(
        boolean reverse,
        String dirName)
    {
        this(reverse, DefaultLookUp, dirName);
    }
    /**
     * Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
     * some such, converting it into an ordered set of name attributes with each
     * string value being converted to its associated ASN.1 type using the passed
     * in converter. If reverse is true the ASN.1 sequence representing the DN will
     * be built by starting at the end of the string, rather than the start.
     */
    public X509Name(
        boolean reverse,
        String dirName,
        X509NameEntryConverter converter)
    {
        this(reverse, DefaultLookUp, dirName, converter);
    }
    /**
     * Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
     * some such, converting it into an ordered set of name attributes. lookUp
     * should provide a table of lookups, indexed by lowercase-only strings and
     * yielding a DERObjectIdentifier; other than that, names starting with
     * "OID." and numeric oids will be processed automatically.
     * <br>
     * If reverse is true, create the encoded version of the sequence
     * starting from the last element in the string.
     * @param reverse true if we should start scanning from the end (RFC 2553).
     * @param lookUp table of names and their oids.
     * @param dirName the X.500 string to be parsed.
     */
    public X509Name(
        boolean reverse,
        Hashtable lookUp,
        String dirName)
    {
        this(reverse, lookUp, dirName, new X509DefaultEntryConverter());
    }
private DERObjectIdentifier decodeOID(
String name,
Hashtable lookUp)
{
if (Strings.toUpperCase(name).startsWith("OID."))
{
return new DERObjectIdentifier(name.substring(4));
}
else if (name.charAt(0) >= '0' && name.charAt(0) <= '9')
{
return new DERObjectIdentifier(name);
}
DERObjectIdentifier oid = (DERObjectIdentifier)lookUp.get(Strings.toLowerCase(name));
if (oid == null)
{
throw new IllegalArgumentException("Unknown object id - " + name + " - passed to distinguished name");
}
return oid;
}
    /**
     * Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
     * some such, converting it into an ordered set of name attributes. lookUp
     * should provide a table of lookups, indexed by lowercase-only strings and
     * yielding a DERObjectIdentifier; other than that, names starting with
     * "OID." and numeric oids will be processed automatically. The passed in
     * converter is used to convert the string values to the right of each
     * equals sign to their ASN.1 counterparts.
     * <br>
     * @param reverse true if we should start scanning from the end, false otherwise.
     * @param lookUp table of names and oids.
     * @param dirName the string dirName
     * @param converter the converter to convert string values into their ASN.1 equivalents
     */
    public X509Name(
        boolean reverse,
        Hashtable lookUp,
        String dirName,
        X509NameEntryConverter converter)
    {
        this.converter = converter;
        X509NameTokenizer nTok = new X509NameTokenizer(dirName);
        while (nTok.hasMoreTokens())
        {
            String token = nTok.nextToken();
            int index = token.indexOf('=');
            if (index == -1)
            {
                throw new IllegalArgumentException("badly formated directory string");
            }
            String name = token.substring(0, index);
            String value = token.substring(index + 1);
            DERObjectIdentifier oid = decodeOID(name, lookUp);
            if (value.indexOf('+') > 0)
            {
                // '+' joins several AVAs into one multi-valued RDN
                X509NameTokenizer vTok = new X509NameTokenizer(value, '+');
                this.ordering.addElement(oid);
                this.values.addElement(vTok.nextToken());
                this.added.addElement(FALSE);
                while (vTok.hasMoreTokens())
                {
                    String sv = vTok.nextToken();
                    int ndx = sv.indexOf('=');
                    String nm = sv.substring(0, ndx);
                    String vl = sv.substring(ndx + 1);
                    this.ordering.addElement(decodeOID(nm, lookUp));
                    this.values.addElement(vl);
                    this.added.addElement(TRUE); // part of the preceding RDN
                }
            }
            else
            {
                this.ordering.addElement(oid);
                this.values.addElement(value);
                this.added.addElement(FALSE);
            }
        }
        if (reverse)
        {
            // reverse the RDN order while keeping each multi-valued group
            // ('added' == TRUE) attached to, and following, its leading entry
            Vector o = new Vector();
            Vector v = new Vector();
            Vector a = new Vector();
            int count = 1;
            for (int i = 0; i < this.ordering.size(); i++)
            {
                if (((Boolean)this.added.elementAt(i)).booleanValue())
                {
                    // continuation entry: insert just after its group leader
                    o.insertElementAt(this.ordering.elementAt(i), count);
                    v.insertElementAt(this.values.elementAt(i), count);
                    a.insertElementAt(this.added.elementAt(i), count);
                    count++;
                }
                else
                {
                    // new RDN: push to the front, resetting the group offset
                    o.insertElementAt(this.ordering.elementAt(i), 0);
                    v.insertElementAt(this.values.elementAt(i), 0);
                    a.insertElementAt(this.added.elementAt(i), 0);
                    count = 1;
                }
            }
            this.ordering = o;
            this.values = v;
            this.added = a;
        }
    }
/**
* return a vector of the oids in the name, in the order they were found.
*/
public Vector getOIDs()
{
Vector v = new Vector();
for (int i = 0; i != ordering.size(); i++)
{
v.addElement(ordering.elementAt(i));
}
return v;
}
/**
* return a vector of the values found in the name, in the order they
* were found.
*/
public Vector getValues()
{
Vector v = new Vector();
for (int i = 0; i != values.size(); i++)
{
v.addElement(values.elementAt(i));
}
return v;
}
/**
* return a vector of the values found in the name, in the order they
* were found, with the DN label corresponding to passed in oid.
*/
public Vector getValues(
DERObjectIdentifier oid)
{
Vector v = new Vector();
for (int i = 0; i != values.size(); i++)
{
if (ordering.elementAt(i).equals(oid))
{
v.addElement(values.elementAt(i));
}
}
return v;
}
    /**
     * Produce (and cache) the ASN.1 RDNSequence for this name. Consecutive
     * entries whose 'added' flag is TRUE are folded into the same SET
     * (multi-valued RDN); string values are converted with the entry converter.
     */
    public DERObject toASN1Object()
    {
        if (seq == null)
        {
            ASN1EncodableVector vec = new ASN1EncodableVector();  // the RDNSequence
            ASN1EncodableVector sVec = new ASN1EncodableVector(); // the current SET
            DERObjectIdentifier lstOid = null;
            for (int i = 0; i != ordering.size(); i++)
            {
                ASN1EncodableVector v = new ASN1EncodableVector();
                DERObjectIdentifier oid = (DERObjectIdentifier)ordering.elementAt(i);
                v.add(oid);
                String str = (String)values.elementAt(i);
                v.add(converter.getConvertedValue(oid, str));
                if (lstOid == null
                    || ((Boolean)this.added.elementAt(i)).booleanValue())
                {
                    // first entry, or continuation of a multi-valued RDN:
                    // stays in the current SET
                    sVec.add(new DERSequence(v));
                }
                else
                {
                    // new RDN: flush the previous SET and start a fresh one
                    vec.add(new DERSet(sVec));
                    sVec = new ASN1EncodableVector();
                    sVec.add(new DERSequence(v));
                }
                lstOid = oid;
            }
            vec.add(new DERSet(sVec));
            seq = new DERSequence(vec);
        }
        return seq;
    }
    /**
     * Compare this name to another object for equality, ignoring case and
     * redundant whitespace in the values.
     *
     * @param inOrder if true the order of both X509 names must be the same,
     * as well as the values associated with each element.
     * @return true if the names match under the chosen comparison.
     */
    public boolean equals(Object obj, boolean inOrder)
    {
        if (!inOrder)
        {
            // fall back to the order-insensitive comparison below
            return this.equals(obj);
        }
        if (obj == this)
        {
            return true;
        }
        if (!(obj instanceof X509Name || obj instanceof ASN1Sequence))
        {
            return false;
        }
        // identical DER encodings are trivially equal
        DERObject derO = ((DEREncodable)obj).getDERObject();
        if (this.getDERObject().equals(derO))
        {
            return true;
        }
        X509Name other;
        try
        {
            other = X509Name.getInstance(obj);
        }
        catch (IllegalArgumentException e)
        {
            return false;
        }
        int orderingSize = ordering.size();
        if (orderingSize != other.ordering.size())
        {
            return false;
        }
        // pairwise comparison, position by position
        for (int i = 0; i < orderingSize; i++)
        {
            DERObjectIdentifier oid = (DERObjectIdentifier)ordering.elementAt(i);
            DERObjectIdentifier oOid = (DERObjectIdentifier)other.ordering.elementAt(i);
            if (oid.equals(oOid))
            {
                String value = (String)values.elementAt(i);
                String oValue = (String)other.values.elementAt(i);
                if (!equivalentStrings(value, oValue))
                {
                    return false;
                }
            }
            else
            {
                return false;
            }
        }
        return true;
    }
    /**
     * test for equality - note: case is ignored.
     * Entry order is not significant: each entry of this name must be
     * matched by an unused, equivalent entry of the other name.
     */
    public boolean equals(Object obj)
    {
        if (obj == this)
        {
            return true;
        }
        if (!(obj instanceof X509Name || obj instanceof ASN1Sequence))
        {
            return false;
        }
        // identical DER encodings are trivially equal
        DERObject derO = ((DEREncodable)obj).getDERObject();
        if (this.getDERObject().equals(derO))
        {
            return true;
        }
        X509Name other;
        try
        {
            other = X509Name.getInstance(obj);
        }
        catch (IllegalArgumentException e)
        {
            return false;
        }
        int orderingSize = ordering.size();
        if (orderingSize != other.ordering.size())
        {
            return false;
        }
        // 'indexes' marks entries of 'other' already matched so each is
        // consumed at most once
        boolean[] indexes = new boolean[orderingSize];
        // pick a scan direction as a heuristic for faster matching:
        int start, end, delta;
        if (ordering.elementAt(0).equals(other.ordering.elementAt(0))) // guess forward
        {
            start = 0;
            end = orderingSize;
            delta = 1;
        }
        else // guess reversed - most common problem
        {
            start = orderingSize - 1;
            end = -1;
            delta = -1;
        }
        for (int i = start; i != end; i += delta)
        {
            boolean found = false;
            DERObjectIdentifier oid = (DERObjectIdentifier)ordering.elementAt(i);
            String value = (String)values.elementAt(i);
            for (int j = 0; j < orderingSize; j++)
            {
                if (indexes[j])
                {
                    continue; // already matched against another entry
                }
                DERObjectIdentifier oOid = (DERObjectIdentifier)other.ordering.elementAt(j);
                if (oid.equals(oOid))
                {
                    String oValue = (String)other.values.elementAt(j);
                    if (equivalentStrings(value, oValue))
                    {
                        indexes[j] = true;
                        found = true;
                        break;
                    }
                }
            }
            if (!found)
            {
                return false;
            }
        }
        return true;
    }
private boolean equivalentStrings(String s1, String s2)
{
String value = Strings.toLowerCase(s1.trim());
String oValue = Strings.toLowerCase(s2.trim());
if (!value.equals(oValue))
{
value = stripInternalSpaces(value);
oValue = stripInternalSpaces(oValue);
if (!value.equals(oValue))
{
return false;
}
}
return true;
}
private String stripInternalSpaces(
String str)
{
StringBuffer res = new StringBuffer();
if (str.length() != 0)
{
char c1 = str.charAt(0);
res.append(c1);
for (int k = 1; k < str.length(); k++)
{
char c2 = str.charAt(k);
if (!(c1 == ' ' && c2 == ' '))
{
res.append(c2);
}
c1 = c2;
}
}
return res.toString();
}
public int hashCode()
{
ASN1Sequence seq = (ASN1Sequence)this.getDERObject();
Enumeration e = seq.getObjects();
int hashCode = 0;
while (e.hasMoreElements())
{
hashCode ^= e.nextElement().hashCode();
}
return hashCode;
}
private void appendValue(
StringBuffer buf,
Hashtable oidSymbols,
DERObjectIdentifier oid,
String value)
{
String sym = (String)oidSymbols.get(oid);
if (sym != null)
{
buf.append(sym);
}
else
{
buf.append(oid.getId());
}
buf.append('=');
int index = buf.length();
buf.append(value);
int end = buf.length();
while (index != end)
{
if ((buf.charAt(index) == ',')
|| (buf.charAt(index) == '"')
|| (buf.charAt(index) == '\\')
|| (buf.charAt(index) == '+')
|| (buf.charAt(index) == '<')
|| (buf.charAt(index) == '>')
|| (buf.charAt(index) == ';'))
{
buf.insert(index, "\\");
index++;
end++;
}
index++;
}
}
/**
* convert the structure to a string - if reverse is true the
* oids and values are listed out starting with the last element
* in the sequence (ala RFC 2253), otherwise the string will begin
* with the first element of the structure. If no string definition
* for the oid is found in oidSymbols the string value of the oid is
* added. Two standard symbol tables are provided DefaultSymbols, and
* RFC2253Symbols as part of this class.
*
* @param reverse if true start at the end of the sequence and work back.
* @param oidSymbols look up table strings for oids.
*/
public String toString(
boolean reverse,
Hashtable oidSymbols)
{
StringBuffer buf = new StringBuffer();
Vector components = new Vector();
boolean first = true;
StringBuffer ava = null;
for (int i = 0; i < ordering.size(); i++)
{
if (((Boolean)added.elementAt(i)).booleanValue())
{
ava.append('+');
appendValue(ava, oidSymbols,
(DERObjectIdentifier)ordering.elementAt(i),
(String)values.elementAt(i));
}
else
{
ava = new StringBuffer();
appendValue(ava, oidSymbols,
(DERObjectIdentifier)ordering.elementAt(i),
(String)values.elementAt(i));
components.addElement(ava);
}
}
if (reverse)
{
for (int i = components.size() - 1; i >= 0; i
{
if (first)
{
first = false;
}
else
{
buf.append(',');
}
buf.append(components.elementAt(i).toString());
}
}
else
{
for (int i = 0; i < components.size(); i++)
{
if (first)
{
first = false;
}
else
{
buf.append(',');
}
buf.append(components.elementAt(i).toString());
}
}
return buf.toString();
}
private String bytesToString(
byte[] data)
{
char[] cs = new char[data.length];
for (int i = 0; i != cs.length; i++)
{
cs[i] = (char)(data[i] & 0xff);
}
return new String(cs);
}
    /**
     * Render this name using the class-wide defaults (DefaultReverse and
     * DefaultSymbols).
     */
    public String toString()
    {
        return toString(DefaultReverse, DefaultSymbols);
    }
}
|
package Minesweeper;
import game.CharBoard;
import game.Coordinate;
import game.PieceType;
public class MinesweeperBoard extends CharBoard {
    /** This character represents a bomb */
    public static final PieceType BOMB = PieceType.X;
    /** This character represents a revealed square */
    public static final PieceType OPEN = PieceType.O;
    /** This 2d array shows the user the number of bombs around each square */
    private final int[][] visibleBoard;
    /**
     * Caches the size of the board because the value is used
     * often
     */
    private final int size;
    /** This is the number of bombs that should be on the board */
    private final int numOfBombs;
// The previous board tracks the revealed squares and the squares with bombs
// on them
public MinesweeperBoard(int size) {
super(size);
visibleBoard = new int[size][size];
this.size = size;
numOfBombs = (size * size + 4) / 5;
generateVisibleBoard();
}
/**
* This method gives an initial value to every integer inside of the array
* corresponding to the number surrounding that square.
*/
public void generateVisibleBoard() {
for (int j = 0; j < size; j++) {
for (int i = 0; i < size; i++) {
if (getBoard()[j][i] == BOMB.getChar())
visibleBoard[j][i] = 9; // a value of 9 means that the
// square has a bomb on it
else
visibleBoard[j][i] = numOfSurroundingBombs(j, i);
}
}
}
/**
* This method counts the number of bombs surrounding a square
*
* @param y
* a y coordinate
* @param x
* an x coordinate
* @return the total number of bombs surrounding a square
*/
public int numOfSurroundingBombs(int y, int x) {
int bombs = 0;
int[] yOffset = { 1, 1, 1, 0, 0, -1, -1, -1 };
int[] xOffset = { -1, 0, 1, -1, 1, -1, 0, 1 };
Coordinate c;
for (int i = 0; i < 8; i++) {
c = new Coordinate(x + xOffset[i], y + yOffset[i]);
if (squareExists(c)) {
bombs += hasBomb(c.getY(), c.getX());
}
}
return bombs;
}
/**
* This method takes a coordinate and returns 1 if a bomb is on the square
*
* @param y
* a y coordinate
* @param x
* an x coordinate
* @return 1 if there is a bomb and 0 if there is not a bomb
*/
private int hasBomb(int y, int x) {
if (getBoard()[y][x] == BOMB.getChar())
return 1;
return 0;
}
/**
* This method verifies that a square can be revealed. For Minesweeper, this
* involves checking if the square had been reveled before.
*
* @param piece
* the piece that will be checked. Only an OPEN PieceType should
* be given
* @param c
* the coordinates that will be checked
* @return if the square can be revealed
*/
@Override
public boolean canPlace(PieceType piece, Coordinate c) {
return getBoard()[c.getX()][c.getY()] != OPEN.getChar();
}
/**
* This method detects if the user had ended the game by either revealed a
* bomb (a loss) or filling the board (a win).
*
* @return a character that corresponds to a win or loss
*/
@Override
public char gameEnd() {
// The user had removed a bomb by revealing it
if (countBombs() != numOfBombs)
return BOMB.getChar();
// The user has not revealed every square
Coordinate c;
for (int j = 0; j < size; j++)
for (int i = 0; i < size; i++) {
c = new Coordinate(j, i);
if (isEmpty(c))
return ' ';
}
// The user has revealed every square
return OPEN.getChar();
}
public int countBombs() {
// TODO
return 0;
}
/**
* For every column, put a number. For every row, put a letter. Place the
* number of surrounding squares on each square that is revealed.
*
* @return a text created board with all the revealed number squares shown
*/
@Override
public String toString() {
char[][] tempBoard = getBoard();
String out = " ";
for (int i = 0; i < tempBoard.length; i++)
out += " " + (i + 1);
out += "\n";
char letter = 'A';
for (int j = 0; j < tempBoard[0].length; j++) {
out += "" + letter++ + " " + showVisibleBoard(j, 0);
for (int i = 1; i < tempBoard.length; i++) {
out += "|" + showVisibleBoard(j, i);
}
if (j < tempBoard[0].length - 1) {
out += "\n -";
for (int i = 1; i < tempBoard.length; i++)
out += "
out += "\n";
}
}
out += "\n";
return out;
}
private char showVisibleBoard(int y, int x) {
if (getBoard()[y][x] == OPEN.getChar()) {
// if the value is 9, it is a bomb so show a bomb
if (visibleBoard[y][x] == 9) return BOMB.getChar();
// if the value is not 9, then show the value
return Integer.toString(visibleBoard[y][x]).charAt(0);
}
// otherwise, just show the piece (which should be ' ')
return getBoard()[y][x];
}
}
|
package org.bouncycastle.asn1.x509;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.Vector;
import org.bouncycastle.asn1.*;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.util.encoders.Hex;
/**
* <pre>
* RDNSequence ::= SEQUENCE OF RelativeDistinguishedName
*
* RelativeDistinguishedName ::= SET SIZE (1..MAX) OF AttributeTypeAndValue
*
* AttributeTypeAndValue ::= SEQUENCE {
* type OBJECT IDENTIFIER,
* value ANY }
* </pre>
*/
public class X509Name
extends ASN1Encodable
{
/**
* country code - StringType(SIZE(2))
*/
public static final DERObjectIdentifier C = new DERObjectIdentifier("2.5.4.6");
/**
* organization - StringType(SIZE(1..64))
*/
public static final DERObjectIdentifier O = new DERObjectIdentifier("2.5.4.10");
/**
* organizational unit name - StringType(SIZE(1..64))
*/
public static final DERObjectIdentifier OU = new DERObjectIdentifier("2.5.4.11");
/**
* Title
*/
public static final DERObjectIdentifier T = new DERObjectIdentifier("2.5.4.12");
/**
* common name - StringType(SIZE(1..64))
*/
public static final DERObjectIdentifier CN = new DERObjectIdentifier("2.5.4.3");
/**
* device serial number name - StringType(SIZE(1..64))
*/
public static final DERObjectIdentifier SN = new DERObjectIdentifier("2.5.4.5");
/**
* locality name - StringType(SIZE(1..64))
*/
public static final DERObjectIdentifier L = new DERObjectIdentifier("2.5.4.7");
/**
* state, or province name - StringType(SIZE(1..64))
*/
public static final DERObjectIdentifier ST = new DERObjectIdentifier("2.5.4.8");
/**
* Naming attributes of type X520name
*/
public static final DERObjectIdentifier SURNAME = new DERObjectIdentifier("2.5.4.4");
public static final DERObjectIdentifier GIVENNAME = new DERObjectIdentifier("2.5.4.42");
public static final DERObjectIdentifier INITIALS = new DERObjectIdentifier("2.5.4.43");
public static final DERObjectIdentifier GENERATION = new DERObjectIdentifier("2.5.4.44");
public static final DERObjectIdentifier UNIQUE_IDENTIFIER = new DERObjectIdentifier("2.5.4.45");
/**
* Email address (RSA PKCS#9 extension) - IA5String.
* <p>Note: if you're trying to be ultra orthodox, don't use this! It shouldn't be in here.
*/
public static final DERObjectIdentifier EmailAddress = PKCSObjectIdentifiers.pkcs_9_at_emailAddress;
/**
* more from PKCS#9
*/
public static final DERObjectIdentifier UnstructuredName = PKCSObjectIdentifiers.pkcs_9_at_unstructuredName;
public static final DERObjectIdentifier UnstructuredAddress = PKCSObjectIdentifiers.pkcs_9_at_unstructuredAddress;
/**
* email address in Verisign certificates
*/
public static final DERObjectIdentifier E = EmailAddress;
/*
* others...
*/
public static final DERObjectIdentifier DC = new DERObjectIdentifier("0.9.2342.19200300.100.1.25");
/**
* LDAP User id.
*/
public static final DERObjectIdentifier UID = new DERObjectIdentifier("0.9.2342.19200300.100.1.1");
/**
* look up table translating OID values into their common symbols - this static is scheduled for deletion
*/
public static Hashtable OIDLookUp = new Hashtable();
/**
* determines whether or not strings should be processed and printed
* from back to front.
*/
public static boolean DefaultReverse = false;
/**
* default look up table translating OID values into their common symbols following
* the convention in RFC 2253 with a few extras
*/
public static Hashtable DefaultSymbols = OIDLookUp;
/**
* look up table translating OID values into their common symbols following the convention in RFC 2253
* with a few extras
*/
public static Hashtable RFC2253Symbols = new Hashtable();
/**
* look up table translating string values into their OIDS -
* this static is scheduled for deletion
*/
public static Hashtable SymbolLookUp = new Hashtable();
/**
* look up table translating common symbols into their OIDS.
*/
public static Hashtable DefaultLookUp = SymbolLookUp;
static
{
DefaultSymbols.put(C, "C");
DefaultSymbols.put(O, "O");
DefaultSymbols.put(T, "T");
DefaultSymbols.put(OU, "OU");
DefaultSymbols.put(CN, "CN");
DefaultSymbols.put(L, "L");
DefaultSymbols.put(ST, "ST");
DefaultSymbols.put(SN, "SN");
DefaultSymbols.put(EmailAddress, "E");
DefaultSymbols.put(DC, "DC");
DefaultSymbols.put(UID, "UID");
DefaultSymbols.put(SURNAME, "SURNAME");
DefaultSymbols.put(GIVENNAME, "GIVENNAME");
DefaultSymbols.put(INITIALS, "INITIALS");
DefaultSymbols.put(GENERATION, "GENERATION");
DefaultSymbols.put(UnstructuredAddress, "unstructuredAddress");
DefaultSymbols.put(UnstructuredName, "unstructuredName");
RFC2253Symbols.put(C, "C");
RFC2253Symbols.put(O, "O");
RFC2253Symbols.put(T, "T");
RFC2253Symbols.put(OU, "OU");
RFC2253Symbols.put(CN, "CN");
RFC2253Symbols.put(L, "L");
RFC2253Symbols.put(ST, "ST");
RFC2253Symbols.put(SN, "SN");
RFC2253Symbols.put(EmailAddress, "EMAILADDRESS");
RFC2253Symbols.put(DC, "DC");
RFC2253Symbols.put(UID, "UID");
RFC2253Symbols.put(SURNAME, "SURNAME");
RFC2253Symbols.put(GIVENNAME, "GIVENNAME");
RFC2253Symbols.put(INITIALS, "INITIALS");
RFC2253Symbols.put(GENERATION, "GENERATION");
DefaultLookUp.put("c", C);
DefaultLookUp.put("o", O);
DefaultLookUp.put("t", T);
DefaultLookUp.put("ou", OU);
DefaultLookUp.put("cn", CN);
DefaultLookUp.put("l", L);
DefaultLookUp.put("st", ST);
DefaultLookUp.put("sn", SN);
DefaultLookUp.put("emailaddress", E);
DefaultLookUp.put("dc", DC);
DefaultLookUp.put("e", E);
DefaultLookUp.put("uid", UID);
DefaultLookUp.put("surname", SURNAME);
DefaultLookUp.put("givenname", GIVENNAME);
DefaultLookUp.put("initials", INITIALS);
DefaultLookUp.put("generation", GENERATION);
DefaultLookUp.put("unstructuredaddress", UnstructuredAddress);
DefaultLookUp.put("unstructuredname", UnstructuredName);
}
private X509NameEntryConverter converter = null;
private Vector ordering = new Vector();
private Vector values = new Vector();
private Vector added = new Vector();
private ASN1Sequence seq;
/**
* Return a X509Name based on the passed in tagged object.
*
* @param obj tag object holding name.
* @param explicit true if explicitly tagged false otherwise.
* @return the X509Name
*/
public static X509Name getInstance(
ASN1TaggedObject obj,
boolean explicit)
{
return getInstance(ASN1Sequence.getInstance(obj, explicit));
}
public static X509Name getInstance(
Object obj)
{
if (obj == null || obj instanceof X509Name)
{
return (X509Name)obj;
}
else if (obj instanceof ASN1Sequence)
{
return new X509Name((ASN1Sequence)obj);
}
throw new IllegalArgumentException("unknown object in factory");
}
/**
* Constructor from ASN1Sequence
*
* the principal will be a list of constructed sets, each containing an (OID, String) pair.
*/
public X509Name(
ASN1Sequence seq)
{
this.seq = seq;
Enumeration e = seq.getObjects();
while (e.hasMoreElements())
{
ASN1Set set = (ASN1Set)e.nextElement();
for (int i = 0; i < set.size(); i++)
{
ASN1Sequence s = (ASN1Sequence)set.getObjectAt(i);
ordering.addElement(s.getObjectAt(0));
DEREncodable value = s.getObjectAt(1);
if (value instanceof DERString)
{
values.addElement(((DERString)value).getString());
}
else
{
values.addElement("#" + bytesToString(Hex.encode(value.getDERObject().getDEREncoded())));
}
added.addElement((i != 0) ? new Boolean(true) : new Boolean(false));
}
}
}
/**
* constructor from a table of attributes.
* <p>
* it's is assumed the table contains OID/String pairs, and the contents
* of the table are copied into an internal table as part of the
* construction process.
* <p>
* <b>Note:</b> if the name you are trying to generate should be
* following a specific ordering, you should use the constructor
* with the ordering specified below.
*/
public X509Name(
Hashtable attributes)
{
this(null, attributes);
}
/**
* Constructor from a table of attributes with ordering.
* <p>
* it's is assumed the table contains OID/String pairs, and the contents
* of the table are copied into an internal table as part of the
* construction process. The ordering vector should contain the OIDs
* in the order they are meant to be encoded or printed in toString.
*/
public X509Name(
Vector ordering,
Hashtable attributes)
{
this(ordering, attributes, new X509DefaultEntryConverter());
}
/**
* Constructor from a table of attributes with ordering.
* <p>
* it's is assumed the table contains OID/String pairs, and the contents
* of the table are copied into an internal table as part of the
* construction process. The ordering vector should contain the OIDs
* in the order they are meant to be encoded or printed in toString.
* <p>
* The passed in converter will be used to convert the strings into their
* ASN.1 counterparts.
*/
public X509Name(
Vector ordering,
Hashtable attributes,
X509DefaultEntryConverter converter)
{
this.converter = converter;
if (ordering != null)
{
for (int i = 0; i != ordering.size(); i++)
{
this.ordering.addElement(ordering.elementAt(i));
this.added.addElement(new Boolean(false));
}
}
else
{
Enumeration e = attributes.keys();
while (e.hasMoreElements())
{
this.ordering.addElement(e.nextElement());
this.added.addElement(new Boolean(false));
}
}
for (int i = 0; i != this.ordering.size(); i++)
{
DERObjectIdentifier oid = (DERObjectIdentifier)this.ordering.elementAt(i);
if (attributes.get(oid) == null)
{
throw new IllegalArgumentException("No attribute for object id - " + oid.getId() + " - passed to distinguished name");
}
this.values.addElement(attributes.get(oid)); // copy the hash table
}
}
/**
* Takes two vectors one of the oids and the other of the values.
*/
public X509Name(
Vector oids,
Vector values)
{
this(oids, values, new X509DefaultEntryConverter());
}
/**
* Takes two vectors one of the oids and the other of the values.
* <p>
* The passed in converter will be used to convert the strings into their
* ASN.1 counterparts.
*/
public X509Name(
Vector oids,
Vector values,
X509NameEntryConverter converter)
{
this.converter = converter;
if (oids.size() != values.size())
{
throw new IllegalArgumentException("oids vector must be same length as values.");
}
for (int i = 0; i < oids.size(); i++)
{
this.ordering.addElement(oids.elementAt(i));
this.values.addElement(values.elementAt(i));
this.added.addElement(new Boolean(false));
}
}
/**
* Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
* some such, converting it into an ordered set of name attributes.
*/
public X509Name(
String dirName)
{
this(DefaultReverse, DefaultLookUp, dirName);
}
/**
* Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
* some such, converting it into an ordered set of name attributes with each
* string value being converted to its associated ASN.1 type using the passed
* in converter.
*/
public X509Name(
String dirName,
X509NameEntryConverter converter)
{
this(DefaultReverse, DefaultLookUp, dirName, converter);
}
/**
* Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
* some such, converting it into an ordered set of name attributes. If reverse
* is true, create the encoded version of the sequence starting from the
* last element in the string.
*/
public X509Name(
boolean reverse,
String dirName)
{
this(reverse, DefaultLookUp, dirName);
}
/**
* Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
* some such, converting it into an ordered set of name attributes with each
* string value being converted to its associated ASN.1 type using the passed
* in converter. If reverse is true the ASN.1 sequence representing the DN will
* be built by starting at the end of the string, rather than the start.
*/
public X509Name(
boolean reverse,
String dirName,
X509NameEntryConverter converter)
{
this(reverse, DefaultLookUp, dirName, converter);
}
/**
* Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
* some such, converting it into an ordered set of name attributes. lookUp
* should provide a table of lookups, indexed by lowercase only strings and
* yielding a DERObjectIdentifier, other than that OID. and numeric oids
* will be processed automatically.
* <br>
* If reverse is true, create the encoded version of the sequence
* starting from the last element in the string.
* @param reverse true if we should start scanning from the end (RFC 2553).
* @param lookUp table of names and their oids.
* @param dirName the X.500 string to be parsed.
*/
public X509Name(
boolean reverse,
Hashtable lookUp,
String dirName)
{
this(reverse, lookUp, dirName, new X509DefaultEntryConverter());
}
private DERObjectIdentifier decodeOID(
String name,
Hashtable lookUp)
{
if (name.toUpperCase().startsWith("OID."))
{
return new DERObjectIdentifier(name.substring(4));
}
else if (name.charAt(0) >= '0' && name.charAt(0) <= '9')
{
return new DERObjectIdentifier(name);
}
DERObjectIdentifier oid = (DERObjectIdentifier)lookUp.get(name.toLowerCase());
if (oid == null)
{
throw new IllegalArgumentException("Unknown object id - " + name + " - passed to distinguished name");
}
return oid;
}
/**
* Takes an X509 dir name as a string of the format "C=AU, ST=Victoria", or
* some such, converting it into an ordered set of name attributes. lookUp
* should provide a table of lookups, indexed by lowercase only strings and
* yielding a DERObjectIdentifier, other than that OID. and numeric oids
* will be processed automatically. The passed in converter is used to convert the
* string values to the right of each equals sign to their ASN.1 counterparts.
* <br>
* @param reverse true if we should start scanning from the end, false otherwise.
* @param lookUp table of names and oids.
* @param dirName the string dirName
* @param converter the converter to convert string values into their ASN.1 equivalents
*/
public X509Name(
boolean reverse,
Hashtable lookUp,
String dirName,
X509NameEntryConverter converter)
{
this.converter = converter;
X509NameTokenizer nTok = new X509NameTokenizer(dirName);
while (nTok.hasMoreTokens())
{
String token = nTok.nextToken();
int index = token.indexOf('=');
if (index == -1)
{
throw new IllegalArgumentException("badly formated directory string");
}
String name = token.substring(0, index);
String value = token.substring(index + 1);
DERObjectIdentifier oid = decodeOID(name, lookUp);
if (value.indexOf('+') > 0)
{
X509NameTokenizer vTok = new X509NameTokenizer(value, '+');
this.ordering.addElement(oid);
this.values.addElement(vTok.nextToken());
this.added.addElement(new Boolean(false));
while (vTok.hasMoreTokens())
{
String sv = vTok.nextToken();
int ndx = sv.indexOf('=');
String nm = sv.substring(0, ndx);
String vl = sv.substring(ndx + 1);
this.ordering.addElement(decodeOID(nm, lookUp));
this.values.addElement(vl);
this.added.addElement(new Boolean(true));
}
}
else
{
this.ordering.addElement(oid);
this.values.addElement(value);
this.added.addElement(new Boolean(false));
}
}
if (reverse)
{
Vector o = new Vector();
Vector v = new Vector();
Vector a = new Vector();
for (int i = this.ordering.size() - 1; i >= 0; i
{
o.addElement(this.ordering.elementAt(i));
v.addElement(this.values.elementAt(i));
a.addElement(this.added.elementAt(i));
}
this.ordering = o;
this.values = v;
this.added = a;
}
}
/**
* return a vector of the oids in the name, in the order they were found.
*/
public Vector getOIDs()
{
Vector v = new Vector();
for (int i = 0; i != ordering.size(); i++)
{
v.addElement(ordering.elementAt(i));
}
return v;
}
/**
* return a vector of the values found in the name, in the order they
* were found.
*/
public Vector getValues()
{
Vector v = new Vector();
for (int i = 0; i != values.size(); i++)
{
v.addElement(values.elementAt(i));
}
return v;
}
public DERObject toASN1Object()
{
if (seq == null)
{
ASN1EncodableVector vec = new ASN1EncodableVector();
ASN1EncodableVector sVec = new ASN1EncodableVector();
DERObjectIdentifier lstOid = null;
for (int i = 0; i != ordering.size(); i++)
{
ASN1EncodableVector v = new ASN1EncodableVector();
DERObjectIdentifier oid = (DERObjectIdentifier)ordering.elementAt(i);
v.add(oid);
String str = (String)values.elementAt(i);
v.add(converter.getConvertedValue(oid, str));
if (lstOid == null
|| ((Boolean)this.added.elementAt(i)).booleanValue())
{
sVec.add(new DERSequence(v));
}
else
{
vec.add(new DERSet(sVec));
sVec = new ASN1EncodableVector();
sVec.add(new DERSequence(v));
}
lstOid = oid;
}
vec.add(new DERSet(sVec));
seq = new DERSequence(vec);
}
return seq;
}
/**
* @param inOrder if true the order of both X509 names must be the same,
* as well as the values associated with each element.
*/
public boolean equals(Object _obj, boolean inOrder)
{
if (_obj == this)
{
return true;
}
if (!inOrder)
{
return this.equals(_obj);
}
if (_obj == null || !(_obj instanceof X509Name))
{
return false;
}
X509Name _oxn = (X509Name)_obj;
int _orderingSize = ordering.size();
if (_orderingSize != _oxn.ordering.size())
{
return false;
}
for(int i = 0; i < _orderingSize; i++)
{
String _oid = ((DERObjectIdentifier)ordering.elementAt(i)).getId();
String _val = (String)values.elementAt(i);
String _oOID = ((DERObjectIdentifier)_oxn.ordering.elementAt(i)).getId();
String _oVal = (String)_oxn.values.elementAt(i);
if (_oid.equals(_oOID))
{
_val = _val.trim().toLowerCase();
_oVal = _oVal.trim().toLowerCase();
if (_val.equals(_oVal))
{
continue;
}
else
{
StringBuffer v1 = new StringBuffer();
StringBuffer v2 = new StringBuffer();
if (_val.length() != 0)
{
char c1 = _val.charAt(0);
v1.append(c1);
for (int k = 1; k < _val.length(); k++)
{
char c2 = _val.charAt(k);
if (!(c1 == ' ' && c2 == ' '))
{
v1.append(c2);
}
c1 = c2;
}
}
if (_oVal.length() != 0)
{
char c1 = _oVal.charAt(0);
v2.append(c1);
for (int k = 1; k < _oVal.length(); k++)
{
char c2 = _oVal.charAt(k);
if (!(c1 == ' ' && c2 == ' '))
{
v2.append(c2);
}
c1 = c2;
}
}
if (!v1.toString().equals(v2.toString()))
{
return false;
}
}
}
else
{
return false;
}
}
return true;
}
/**
* test for equality - note: case is ignored.
*/
public boolean equals(Object _obj)
{
if (_obj == this)
{
return true;
}
if (!(_obj instanceof X509Name || _obj instanceof ASN1Sequence))
{
return false;
}
DERObject derO = ((DEREncodable)_obj).getDERObject();
if (this.getDERObject().equals(derO))
{
return true;
}
if (!(_obj instanceof X509Name))
{
return false;
}
X509Name _oxn = (X509Name)_obj;
int _orderingSize = ordering.size();
if (_orderingSize != _oxn.ordering.size())
{
return false;
}
boolean[] _indexes = new boolean[_orderingSize];
for(int i = 0; i < _orderingSize; i++)
{
boolean _found = false;
String _oid = ((DERObjectIdentifier)ordering.elementAt(i)).getId();
String _val = (String)values.elementAt(i);
for(int j = 0; j < _orderingSize; j++)
{
if (_indexes[j])
{
continue;
}
String _oOID = ((DERObjectIdentifier)_oxn.ordering.elementAt(j)).getId();
String _oVal = (String)_oxn.values.elementAt(j);
if (_oid.equals(_oOID))
{
_val = _val.trim().toLowerCase();
_oVal = _oVal.trim().toLowerCase();
if (_val.equals(_oVal))
{
_indexes[j] = true;
_found = true;
break;
}
else
{
StringBuffer v1 = new StringBuffer();
StringBuffer v2 = new StringBuffer();
if (_val.length() != 0)
{
char c1 = _val.charAt(0);
v1.append(c1);
for (int k = 1; k < _val.length(); k++)
{
char c2 = _val.charAt(k);
if (!(c1 == ' ' && c2 == ' '))
{
v1.append(c2);
}
c1 = c2;
}
}
if (_oVal.length() != 0)
{
char c1 = _oVal.charAt(0);
v2.append(c1);
for (int k = 1; k < _oVal.length(); k++)
{
char c2 = _oVal.charAt(k);
if (!(c1 == ' ' && c2 == ' '))
{
v2.append(c2);
}
c1 = c2;
}
}
if (v1.toString().equals(v2.toString()))
{
_indexes[j] = true;
_found = true;
break;
}
}
}
}
if(!_found)
{
return false;
}
}
return true;
}
public int hashCode()
{
ASN1Sequence seq = (ASN1Sequence)this.getDERObject();
Enumeration e = seq.getObjects();
int hashCode = 0;
while (e.hasMoreElements())
{
hashCode ^= e.nextElement().hashCode();
}
return hashCode;
}
private void appendValue(
StringBuffer buf,
Hashtable oidSymbols,
DERObjectIdentifier oid,
String value)
{
String sym = (String)oidSymbols.get(oid);
if (sym != null)
{
buf.append(sym);
}
else
{
buf.append(oid.getId());
}
buf.append("=");
int index = buf.length();
buf.append(value);
int end = buf.length();
while (index != end)
{
if ((buf.charAt(index) == ',')
|| (buf.charAt(index) == '"')
|| (buf.charAt(index) == '\\')
|| (buf.charAt(index) == '+')
|| (buf.charAt(index) == '<')
|| (buf.charAt(index) == '>')
|| (buf.charAt(index) == ';'))
{
buf.insert(index, "\\");
index++;
end++;
}
index++;
}
}
/**
* convert the structure to a string - if reverse is true the
* oids and values are listed out starting with the last element
* in the sequence (ala RFC 2253), otherwise the string will begin
* with the first element of the structure. If no string definition
* for the oid is found in oidSymbols the string value of the oid is
* added. Two standard symbol tables are provided DefaultSymbols, and
* RFC2253Symbols as part of this class.
*
* @param reverse if true start at the end of the sequence and work back.
* @param oidSymbols look up table strings for oids.
*/
public String toString(
boolean reverse,
Hashtable oidSymbols)
{
StringBuffer buf = new StringBuffer();
boolean first = true;
if (reverse)
{
for (int i = ordering.size() - 1; i >= 0; i
{
if (first)
{
first = false;
}
else
{
if (((Boolean)added.elementAt(i + 1)).booleanValue())
{
buf.append("+");
}
else
{
buf.append(",");
}
}
appendValue(buf, oidSymbols,
(DERObjectIdentifier)ordering.elementAt(i),
(String)values.elementAt(i));
}
}
else
{
for (int i = 0; i < ordering.size(); i++)
{
if (first)
{
first = false;
}
else
{
if (((Boolean)added.elementAt(i)).booleanValue())
{
buf.append("+");
}
else
{
buf.append(",");
}
}
appendValue(buf, oidSymbols,
(DERObjectIdentifier)ordering.elementAt(i),
(String)values.elementAt(i));
}
}
return buf.toString();
}
private String bytesToString(
byte[] data)
{
char[] cs = new char[data.length];
for (int i = 0; i != cs.length; i++)
{
cs[i] = (char)(data[i] & 0xff);
}
return new String(cs);
}
public String toString()
{
return toString(DefaultReverse, DefaultSymbols);
}
}
|
//$Id: KeyStoreGenerator.java,v 1.2 2005/01/07 15:15:26 steview Exp $
package org.jgroups.demos;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.security.KeyStore;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
/**
* Generates a keystore file that has a SecretKey in it. It is not possible to
* use the keytool JDk tool to achieve this. This is a simple way to generate
* a JCEKS format keystore and SecretKey.
*
* Usage is --alg ALGNAME --size ALGSIZE --storeName FILENAME --storePass PASSWORD --alias KEYALIAS
*
* Any of args are optional and will default to
* <ul>
* <li>ALGNAME = Blowfish
* <li>ALGSIZE = 56
* <li>FILENAME = defaultStore.keystore
* <li>PASSWORD = changeit
* <li>ALIAS = mykey
* </ul>
*
* @author S Woodcock
*
*/
public class KeyStoreGenerator {
static String symAlg = "Blowfish";
static int keySize =56;
static String keyStoreName = "defaultStore.keystore";
static String storePass = "changeit";
static String alias = "myKey";
public static void main(String[] args)
{
int i = 0, j;
String arg =null;;
boolean specified =false;
while (i < args.length && args[i].startsWith("-")) {
arg = args[i++];
System.out.println("Found arg of " + arg);
if (arg.equalsIgnoreCase("--alg")){
if (i<args.length){
symAlg = args[i++];
}else{
System.out.println("No Algorithm supplied using default of "+ symAlg);
}
}
else if (arg.equalsIgnoreCase("--size")){
if (i<args.length){
keySize = Integer.parseInt(args[i++]);
}else{
System.out.println("No Size supplied using default of "+keySize);
}
}else if (arg.equalsIgnoreCase("--storeName")){
if (i<args.length){
String temp = args[i++];
keyStoreName = temp;
}else{
System.out.println("No keystore supplied using default of "+keyStoreName);
}
}
else if (arg.equalsIgnoreCase("--storePass")){
if (i<args.length){
storePass = args[i++];
}else{
System.out.println("No password supplied using default of "+storePass);
}
}
else if (arg.equalsIgnoreCase("--alias")){
if (i<args.length){
alias = args[i++];
}else{
System.out.println("No alias supplied using default of "+alias);
}
}
}
System.out.println("Creating file '" + keyStoreName +"' using Algorithm '"+
symAlg +"' size '"+keySize + "'");
OutputStream stream =null;
try {
stream= new FileOutputStream(keyStoreName);
SecretKey key = initSymKey();
KeyStore store = KeyStore.getInstance("JCEKS");
store.load(null,null);
store.setKeyEntry(alias,key,storePass.toCharArray(),null);
store.store(stream,storePass.toCharArray());
} catch (Exception e){
e.printStackTrace();
}
finally{
try {
stream.close();
} catch (Exception e){
}
}
System.out.println("Finished keystore creation");
}
public static SecretKey initSymKey() throws Exception
{
KeyGenerator keyGen = null;
// generate secret key
keyGen = KeyGenerator.getInstance(getAlgorithm(symAlg));
keyGen.init(keySize);
SecretKey secretKey = keyGen.generateKey();
return secretKey;
}
private static String getAlgorithm(String s)
{
int index = s.indexOf("/");
if (index == -1)
return s;
return s.substring(0, index);
}
}
|
// $Id: NAKACK.java,v 1.96 2007/01/09 16:08:35 belaban Exp $
package org.jgroups.protocols.pbcast;
import org.jgroups.*;
import org.jgroups.stack.NakReceiverWindow;
import org.jgroups.stack.Protocol;
import org.jgroups.stack.Retransmitter;
import org.jgroups.util.*;
import java.io.IOException;
import java.util.*;
/**
* Negative AcKnowledgement layer (NAKs). Messages are assigned a monotonically increasing sequence number (seqno).
* Receivers deliver messages ordered according to seqno and request retransmission of missing messages. Retransmitted
* messages are bundled into bigger ones, e.g. when getting an xmit request for messages 1-10, instead of sending 10
* unicast messages, we bundle all 10 messages into 1 and send it. However, since this protocol typically sits below
* FRAG, we cannot count on FRAG to fragement/defragment the (possibly) large message into smaller ones. Therefore we
* only bundle messages up to max_xmit_size bytes to prevent too large messages. For example, if the bundled message
* size was a total of 34000 bytes, and max_xmit_size=16000, we'd send 3 messages: 2 16K and a 2K message. <em>Note that
* max_xmit_size should be the same value as FRAG.frag_size (or smaller).</em><br/> Retransmit requests are always sent
* to the sender. If the sender dies, and not everyone has received its messages, they will be lost. In the future, this
* may be changed to have receivers store all messages, so that retransmit requests can be answered by any member.
* Trivial to implement, but not done yet. For most apps, the default retransmit properties are sufficient, if not use
* vsync.
*
* @author Bela Ban
*/
public class NAKACK extends Protocol implements Retransmitter.RetransmitCommand, NakReceiverWindow.Listener {
    private long[] retransmit_timeout={600, 1200, 2400, 4800}; // time(s) to wait before requesting retransmission
    private boolean is_server=false; // NOTE(review): flipped elsewhere in this protocol — presumably once this member has joined; confirm
    private Address local_addr=null; // our own address; assigned outside this excerpt
    private final Vector members=new Vector(11); // current group membership
    private View view; // current view; NOTE(review): maintained by view-change handling not visible here
    private long seqno=-1; // current message sequence number (starts with 0)
    private long max_xmit_size=8192; // max size of a retransmit message (otherwise send multiple)
    private int gc_lag=20; // number of msgs garbage collection lags behind

    /**
     * Retransmit messages using multicast rather than unicast. This has the advantage that, if many receivers lost a
     * message, the sender only retransmits once.
     */
    private boolean use_mcast_xmit=true;

    /**
     * Ask a random member for retransmission of a missing message. If set to true, discard_delivered_msgs will be
     * set to false
     */
    private boolean xmit_from_random_member=false;

    /**
     * Messages that have been received in order are sent up the stack (= delivered to the application). Delivered
     * messages are removed from NakReceiverWindow.received_msgs and moved to NakReceiverWindow.delivered_msgs, where
     * they are later garbage collected (by STABLE). Since we do retransmits only from sent messages, never
     * received or delivered messages, we can turn the moving to delivered_msgs off, so we don't keep the message
     * around, and don't need to wait for garbage collection to remove them.
     */
    private boolean discard_delivered_msgs=false;

    /** If value is > 0, the retransmit buffer is bounded: only the max_xmit_buf_size latest messages are kept,
     * older ones are discarded when the buffer size is exceeded. A value <= 0 means unbounded buffers
     */
    private int max_xmit_buf_size=0;

    /**
     * Hashtable<Address,NakReceiverWindow>. Stores received messages (keyed by sender). Note that this is no long term
     * storage; messages are just stored until they can be delivered (ie., until the correct FIFO order is established)
     */
    private final Map received_msgs=new HashMap(11);

    /** TreeMap<Long,Message>. Map of messages sent by me (keyed and sorted on sequence number) */
    private final TreeMap sent_msgs=new TreeMap();

    private boolean leaving=false; // true while this member is shutting down / leaving the group
    private boolean started=false; // lifecycle flag; set by protocol start/stop outside this excerpt
    private TimeScheduler timer=null; // shared timer used for retransmission scheduling
    private static final String name="NAKACK"; // protocol name returned by getName()

    // Raw statistics counters (exposed through the accessors below).
    private long xmit_reqs_received;
    private long xmit_reqs_sent;
    private long xmit_rsps_received;
    private long xmit_rsps_sent;
    private long missing_msgs_received;

    /** Captures stats on XMIT_REQS, XMIT_RSPS per sender */
    private HashMap sent=new HashMap();

    /** Captures stats on XMIT_REQS, XMIT_RSPS per receiver */
    private HashMap received=new HashMap();

    private int stats_list_size=20; // bound for the two history lists below

    /** BoundedList<XmitRequest>. Keeps track of the last stats_list_size XMIT requests */
    private BoundedList receive_history;

    /** BoundedList<MissingMessage>. Keeps track of the last stats_list_size missing messages received */
    private BoundedList send_history;
public NAKACK() {
}
public String getName() {
return name;
}
public long getXmitRequestsReceived() {return xmit_reqs_received;}
public long getXmitRequestsSent() {return xmit_reqs_sent;}
public long getXmitResponsesReceived() {return xmit_rsps_received;}
public long getXmitResponsesSent() {return xmit_rsps_sent;}
public long getMissingMessagesReceived() {return missing_msgs_received;}
public int getPendingRetransmissionRequests() {
int num=0;
NakReceiverWindow win;
synchronized(received_msgs) {
for(Iterator it=received_msgs.values().iterator(); it.hasNext();) {
win=(NakReceiverWindow)it.next();
num+=win.size();
}
}
return num;
}
public int getSentTableSize() {
int size;
synchronized(sent_msgs) {
size=sent_msgs.size();
}
return size;
}
public int getReceivedTableSize() {
int ret=0;
NakReceiverWindow win;
Set s=new LinkedHashSet(received_msgs.values());
for(Iterator it=s.iterator(); it.hasNext();) {
win=(NakReceiverWindow)it.next();
ret+=win.size();
}
return ret;
}
public void resetStats() {
xmit_reqs_received=xmit_reqs_sent=xmit_rsps_received=xmit_rsps_sent=missing_msgs_received=0;
sent.clear();
received.clear();
if(receive_history !=null)
receive_history.removeAll();
if(send_history != null)
send_history.removeAll();
}
public void init() throws Exception {
if(stats) {
send_history=new BoundedList(stats_list_size);
receive_history=new BoundedList(stats_list_size);
}
}
public int getGcLag() {
return gc_lag;
}
public void setGcLag(int gc_lag) {
this.gc_lag=gc_lag;
}
public boolean isUseMcastXmit() {
return use_mcast_xmit;
}
public void setUseMcastXmit(boolean use_mcast_xmit) {
this.use_mcast_xmit=use_mcast_xmit;
}
public boolean isXmitFromRandomMember() {
return xmit_from_random_member;
}
public void setXmitFromRandomMember(boolean xmit_from_random_member) {
this.xmit_from_random_member=xmit_from_random_member;
}
public boolean isDiscardDeliveredMsgs() {
return discard_delivered_msgs;
}
public void setDiscardDeliveredMsgs(boolean discard_delivered_msgs) {
this.discard_delivered_msgs=discard_delivered_msgs;
}
public int getMaxXmitBufSize() {
return max_xmit_buf_size;
}
public void setMaxXmitBufSize(int max_xmit_buf_size) {
this.max_xmit_buf_size=max_xmit_buf_size;
}
public long getMaxXmitSize() {
return max_xmit_size;
}
public void setMaxXmitSize(long max_xmit_size) {
this.max_xmit_size=max_xmit_size;
}
public boolean setProperties(Properties props) {
String str;
long[] tmp;
super.setProperties(props);
str=props.getProperty("retransmit_timeout");
if(str != null) {
tmp=Util.parseCommaDelimitedLongs(str);
props.remove("retransmit_timeout");
if(tmp != null && tmp.length > 0) {
retransmit_timeout=tmp;
}
}
str=props.getProperty("gc_lag");
if(str != null) {
gc_lag=Integer.parseInt(str);
if(gc_lag < 0) {
log.error("NAKACK.setProperties(): gc_lag cannot be negative, setting it to 0");
}
props.remove("gc_lag");
}
str=props.getProperty("max_xmit_size");
if(str != null) {
max_xmit_size=Long.parseLong(str);
props.remove("max_xmit_size");
}
str=props.getProperty("use_mcast_xmit");
if(str != null) {
use_mcast_xmit=Boolean.valueOf(str).booleanValue();
props.remove("use_mcast_xmit");
}
str=props.getProperty("discard_delivered_msgs");
if(str != null) {
discard_delivered_msgs=Boolean.valueOf(str).booleanValue();
props.remove("discard_delivered_msgs");
}
str=props.getProperty("xmit_from_random_member");
if(str != null) {
xmit_from_random_member=Boolean.valueOf(str).booleanValue();
props.remove("xmit_from_random_member");
}
str=props.getProperty("max_xmit_buf_size");
if(str != null) {
max_xmit_buf_size=Integer.parseInt(str);
props.remove("max_xmit_buf_size");
}
str=props.getProperty("stats_list_size");
if(str != null) {
stats_list_size=Integer.parseInt(str);
props.remove("stats_list_size");
}
if(xmit_from_random_member) {
if(discard_delivered_msgs) {
discard_delivered_msgs=false;
log.warn("xmit_from_random_member set to true: changed discard_delivered_msgs to false");
}
}
if(!props.isEmpty()) {
log.error("these properties are not recognized: " + props);
return false;
}
return true;
}
public Map dumpStats() {
Map retval=super.dumpStats();
if(retval == null)
retval=new HashMap();
retval.put("xmit_reqs_received", new Long(xmit_reqs_received));
retval.put("xmit_reqs_sent", new Long(xmit_reqs_sent));
retval.put("xmit_rsps_received", new Long(xmit_rsps_received));
retval.put("xmit_rsps_sent", new Long(xmit_rsps_sent));
retval.put("missing_msgs_received", new Long(missing_msgs_received));
retval.put("sent_msgs", printSentMsgs());
StringBuilder sb=new StringBuilder();
Map.Entry entry;
Address addr;
Object w;
synchronized(received_msgs) {
for(Iterator it=received_msgs.entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
addr=(Address)entry.getKey();
w=entry.getValue();
sb.append(addr).append(": ").append(w.toString()).append('\n');
}
}
retval.put("received_msgs", sb.toString());
return retval;
}
public String printStats() {
Map.Entry entry;
Object key, val;
StringBuilder sb=new StringBuilder();
sb.append("sent:\n");
for(Iterator it=sent.entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
key=entry.getKey();
if(key == null) key="<mcast dest>";
val=entry.getValue();
sb.append(key).append(": ").append(val).append("\n");
}
sb.append("\nreceived:\n");
for(Iterator it=received.entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
key=entry.getKey();
val=entry.getValue();
sb.append(key).append(": ").append(val).append("\n");
}
sb.append("\nXMIT_REQS sent:\n");
XmitRequest tmp;
for(Enumeration en=send_history.elements(); en.hasMoreElements();) {
tmp=(XmitRequest)en.nextElement();
sb.append(tmp).append("\n");
}
sb.append("\nMissing messages received\n");
MissingMessage missing;
for(Enumeration en=receive_history.elements(); en.hasMoreElements();) {
missing=(MissingMessage)en.nextElement();
sb.append(missing).append("\n");
}
return sb.toString();
}
public Vector providedUpServices() {
Vector retval=new Vector(5);
retval.addElement(new Integer(Event.GET_DIGEST));
retval.addElement(new Integer(Event.GET_DIGEST_STABLE));
retval.addElement(new Integer(Event.GET_DIGEST_STATE));
retval.addElement(new Integer(Event.SET_DIGEST));
retval.addElement(new Integer(Event.MERGE_DIGEST));
return retval;
}
public void start() throws Exception {
timer=stack != null ? stack.timer : null;
if(timer == null)
throw new Exception("timer is null");
started=true;
}
public void stop() {
started=false;
reset(); // clears sent_msgs and destroys all NakReceiverWindows
}
/**
* <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>passDown()</code> in this
* method as the event is passed down by default by the superclass after this method returns !</b>
*/
public void down(Event evt) {
Digest digest;
Vector mbrs;
switch(evt.getType()) {
case Event.MSG:
Message msg=(Message)evt.getArg();
Address dest=msg.getDest();
if(dest != null && !dest.isMulticastAddress()) {
break; // unicast address: not null and not mcast, pass down unchanged
}
send(evt, msg);
return; // don't pass down the stack
case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
stable((Digest)evt.getArg());
return; // do not pass down further (Bela Aug 7 2001)
case Event.GET_DIGEST:
digest=getDigest();
passUp(new Event(Event.GET_DIGEST_OK, digest != null ? digest.copy() : null));
return;
case Event.GET_DIGEST_STATE:
digest=getDigest();
passUp(new Event(Event.GET_DIGEST_STATE_OK, digest != null ? digest.copy() : null));
return;
case Event.SET_DIGEST:
setDigest((Digest)evt.getArg());
return;
case Event.MERGE_DIGEST:
mergeDigest((Digest)evt.getArg());
return;
case Event.CONFIG:
passDown(evt);
if(log.isDebugEnabled()) {
log.debug("received CONFIG event: " + evt.getArg());
}
handleConfigEvent((HashMap)evt.getArg());
return;
case Event.TMP_VIEW:
View tmp_view=(View)evt.getArg();
mbrs=tmp_view.getMembers();
members.clear();
members.addAll(mbrs);
adjustReceivers(false);
break;
case Event.VIEW_CHANGE:
tmp_view=(View)evt.getArg();
mbrs=tmp_view.getMembers();
members.clear();
members.addAll(mbrs);
adjustReceivers(true);
is_server=true; // check vids from now on
Set tmp=new LinkedHashSet(members);
tmp.add(null); // for null destination (= mcast)
sent.keySet().retainAll(tmp);
received.keySet().retainAll(tmp);
view=tmp_view;
break;
case Event.BECOME_SERVER:
is_server=true;
break;
case Event.DISCONNECT:
leaving=true;
reset();
break;
}
passDown(evt);
}
public Object downcall(Event evt) {
Digest digest;
Vector mbrs;
switch(evt.getType()) {
case Event.MSG:
Message msg=(Message)evt.getArg();
Address dest=msg.getDest();
if(dest != null && !dest.isMulticastAddress()) {
break; // unicast address: not null and not mcast, pass down unchanged
}
send(evt, msg);
return null; // don't pass down the stack
case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
stable((Digest)evt.getArg());
return null; // do not pass down further (Bela Aug 7 2001)
case Event.GET_DIGEST:
digest=getDigest();
return digest != null? digest.copy() : null;
case Event.GET_DIGEST_STABLE:
return getDigestHighestDeliveredMsgs();
case Event.GET_DIGEST_STATE:
digest=getDigest();
return digest != null? digest.copy() : null;
case Event.SET_DIGEST:
setDigest((Digest)evt.getArg());
return null;
case Event.MERGE_DIGEST:
mergeDigest((Digest)evt.getArg());
return null;
case Event.CONFIG:
Object retval=super.downcall(evt);
if(log.isDebugEnabled())
log.debug("received CONFIG event: " + evt.getArg());
handleConfigEvent((HashMap)evt.getArg());
return retval;
case Event.TMP_VIEW:
View tmp_view=(View)evt.getArg();
mbrs=tmp_view.getMembers();
members.clear();
members.addAll(mbrs);
adjustReceivers(false);
break;
case Event.VIEW_CHANGE:
tmp_view=(View)evt.getArg();
mbrs=tmp_view.getMembers();
members.clear();
members.addAll(mbrs);
adjustReceivers(true);
is_server=true; // check vids from now on
Set tmp=new LinkedHashSet(members);
tmp.add(null); // for null destination (= mcast)
sent.keySet().retainAll(tmp);
received.keySet().retainAll(tmp);
view=tmp_view;
break;
case Event.BECOME_SERVER:
is_server=true;
break;
case Event.DISCONNECT:
leaving=true;
reset();
break;
}
return super.downcall(evt);
}
/**
* <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>PassUp</code> in this
* method as the event is passed up by default by the superclass after this method returns !</b>
*/
public void up(Event evt) {
NakAckHeader hdr;
Message msg;
Digest digest;
switch(evt.getType()) {
case Event.MSG:
msg=(Message)evt.getArg();
hdr=(NakAckHeader)msg.getHeader(name);
if(hdr == null)
break; // pass up (e.g. unicast msg)
// discard messages while not yet server (i.e., until JOIN has returned)
if(!is_server) {
if(trace)
log.trace("message was discarded (not yet server)");
return;
}
// Changed by bela Jan 29 2003: we must not remove the header, otherwise
// further xmit requests will fail !
//hdr=(NakAckHeader)msg.removeHeader(getName());
switch(hdr.type) {
case NakAckHeader.MSG:
handleMessage(msg, hdr);
return; // transmitter passes message up for us !
case NakAckHeader.XMIT_REQ:
if(hdr.range == null) {
if(log.isErrorEnabled()) {
log.error("XMIT_REQ: range of xmit msg is null; discarding request from " + msg.getSrc());
}
return;
}
handleXmitReq(msg.getSrc(), hdr.range.low, hdr.range.high, hdr.sender);
return;
case NakAckHeader.XMIT_RSP:
if(trace)
log.trace("received missing messages " + hdr.range);
handleXmitRsp(msg);
return;
default:
if(log.isErrorEnabled()) {
log.error("NakAck header type " + hdr.type + " not known !");
}
return;
}
case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
stable((Digest)evt.getArg());
return; // do not pass up further (Bela Aug 7 2001)
case Event.GET_DIGEST:
digest=getDigestHighestDeliveredMsgs();
passDown(new Event(Event.GET_DIGEST_OK, digest));
return;
case Event.SET_LOCAL_ADDRESS:
local_addr=(Address)evt.getArg();
break;
case Event.CONFIG:
passUp(evt);
if(log.isDebugEnabled()) {
log.debug("received CONFIG event: " + evt.getArg());
}
handleConfigEvent((HashMap)evt.getArg());
return;
}
passUp(evt);
}
private void send(Event evt, Message msg) {
if(msg == null)
throw new NullPointerException("msg is null; event is " + evt);
if(!started) {
if(trace)
log.trace("[" + local_addr + "] discarded message as start() has not been called, message: " + msg);
return;
}
long msg_id;
synchronized(sent_msgs) {
try { // incrementing seqno and adding the msg to sent_msgs needs to be atomic
msg_id=seqno +1;
msg.putHeader(name, new NakAckHeader(NakAckHeader.MSG, msg_id));
sent_msgs.put(new Long(msg_id), msg);
seqno=msg_id;
}
catch(Throwable t) {
throw new RuntimeException("failure adding msg " + msg + " to the retransmit table", t);
}
}
try {
if(trace)
log.trace("sending " + local_addr + "#" + msg_id);
passDown(evt); // if this fails, since msg is in sent_msgs, it can be retransmitted
}
catch(Throwable t) { // eat the exception, don't pass it up the stack
if(warn) {
log.warn("failure passing message down", t);
}
}
}
/**
* Finds the corresponding NakReceiverWindow and adds the message to it (according to seqno). Then removes as many
* messages as possible from the NRW and passes them up the stack. Discards messages from non-members.
*/
private void handleMessage(Message msg, NakAckHeader hdr) {
NakReceiverWindow win;
Message msg_to_deliver;
Address sender=msg.getSrc();
if(sender == null) {
if(log.isErrorEnabled())
log.error("sender of message is null");
return;
}
if(trace) {
StringBuilder sb=new StringBuilder('[');
sb.append(local_addr).append(": received ").append(sender).append('#').append(hdr.seqno);
log.trace(sb.toString());
}
// msg is potentially re-sent later as result of XMIT_REQ reception; that's why hdr is added !
// Changed by bela Jan 29 2003: we currently don't resend from received msgs, just from sent_msgs !
// msg.putHeader(getName(), hdr);
synchronized(received_msgs) {
win=(NakReceiverWindow)received_msgs.get(sender);
}
if(win == null) { // discard message if there is no entry for sender
if(leaving)
return;
if(warn) {
StringBuffer sb=new StringBuffer('[');
sb.append(local_addr).append("] discarded message from non-member ")
.append(sender).append(", my view is " ).append(this.view);
log.warn(sb);
}
return;
}
boolean added=win.add(hdr.seqno, msg); // add in order, then remove and pass up as many msgs as possible
// message is passed up if OOB. Later, when remove() is called, we discard it. This affects ordering !
if(msg.isFlagSet(Message.OOB) && added) {
passUp(new Event(Event.MSG, msg));
}
// where lots of threads can come up to this point concurrently, but only 1 is allowed to pass at a time
// We *can* deliver messages from *different* senders concurrently, e.g. reception of P1, Q1, P2, Q2 can result in
// delivery of P1, Q1, Q2, P2: FIFO (implemented by NAKACK) says messages need to be delivered in the
// order in which they were sent by the sender
synchronized(win) {
while((msg_to_deliver=win.remove()) != null) {
if(msg_to_deliver.isFlagSet(Message.OOB)) {
continue;
}
// Changed by bela Jan 29 2003: not needed (see above)
//msg_to_deliver.removeHeader(getName());
passUp(new Event(Event.MSG, msg_to_deliver));
}
}
}
/**
* Retransmit from sent-table, called when XMIT_REQ is received. Bundles all messages to be xmitted into one large
* message and sends them back with an XMIT_RSP header. Note that since we cannot count on a fragmentation layer
* below us, we have to make sure the message doesn't exceed max_xmit_size bytes. If this is the case, we split the
* message into multiple, smaller-chunked messages. But in most cases this still yields fewer messages than if each
* requested message was retransmitted separately.
*
* @param xmit_requester The sender of the XMIT_REQ, we have to send the requested copy of the message to this address
* @param first_seqno The first sequence number to be retransmitted (<= last_seqno)
* @param last_seqno The last sequence number to be retransmitted (>= first_seqno)
* @param original_sender The member who originally sent the messsage. Guaranteed to be non-null
*/
private void handleXmitReq(Address xmit_requester, long first_seqno, long last_seqno, Address original_sender) {
Message m, tmp;
LinkedList list;
long size=0, marker=first_seqno, len;
NakReceiverWindow win=null;
boolean amISender; // am I the original sender ?
if(trace) {
StringBuilder sb=new StringBuilder();
sb.append(local_addr).append(": received xmit request from ").append(xmit_requester).append(" for ");
sb.append(original_sender).append(" [").append(first_seqno).append(" - ").append(last_seqno).append("]");
log.trace(sb.toString());
}
if(first_seqno > last_seqno) {
if(log.isErrorEnabled())
log.error("first_seqno (" + first_seqno + ") > last_seqno (" + last_seqno + "): not able to retransmit");
return;
}
if(stats) {
xmit_reqs_received+=last_seqno - first_seqno +1;
updateStats(received, xmit_requester, 1, 0, 0);
}
amISender=local_addr.equals(original_sender);
if(!amISender)
win=(NakReceiverWindow)received_msgs.get(original_sender);
list=new LinkedList();
for(long i=first_seqno; i <= last_seqno; i++) {
if(amISender) {
m=(Message)sent_msgs.get(new Long(i)); // no need to synchronize
}
else {
m=win != null? win.get(i) : null;
}
if(m == null) {
if(log.isErrorEnabled()) {
StringBuffer sb=new StringBuffer();
sb.append("(requester=").append(xmit_requester).append(", local_addr=").append(this.local_addr);
sb.append(") message ").append(original_sender).append("::").append(i);
sb.append(" not found in ").append((amISender? "sent" : "received")).append(" msgs ");
if(win != null) {
sb.append("from ").append(original_sender).append(": ").append(win.toString());
}
else {
sb.append(printSentMsgs());
}
log.error(sb);
}
continue;
}
len=m.size();
size+=len;
if(size > max_xmit_size && !list.isEmpty()) { // changed from >= to > (yaron-r, bug #943709)
// yaronr: added &&listSize()>0 since protocols between FRAG and NAKACK add headers, and message exceeds size.
// size has reached max_xmit_size. go ahead and send message (excluding the current message)
if(trace)
log.trace("xmitting msgs [" + marker + '-' + (i - 1) + "] to " + xmit_requester);
sendXmitRsp(xmit_requester, (LinkedList)list.clone(), marker, i - 1);
marker=i;
list.clear();
// fixed Dec 15 2003 (bela, patch from Joel Dice (dicej)), see explanantion under
// bug report #854887
size=len;
}
tmp=m;
// tmp.setDest(xmit_requester);
// tmp.setSrc(local_addr);
if(tmp.getSrc() == null)
tmp.setSrc(local_addr);
list.add(tmp);
}
if(!list.isEmpty()) {
if(trace)
log.trace("xmitting msgs [" + marker + '-' + last_seqno + "] to " + xmit_requester);
sendXmitRsp(xmit_requester, (LinkedList)list.clone(), marker, last_seqno);
list.clear();
}
}
private static void updateStats(HashMap map, Address key, int req, int rsp, int missing) {
Entry entry=(Entry)map.get(key);
if(entry == null) {
entry=new Entry();
map.put(key, entry);
}
entry.xmit_reqs+=req;
entry.xmit_rsps+=rsp;
entry.missing_msgs_rcvd+=missing;
}
private void sendXmitRsp(Address dest, LinkedList xmit_list, long first_seqno, long last_seqno) {
Buffer buf;
if(xmit_list == null || xmit_list.isEmpty()) {
if(log.isErrorEnabled())
log.error("xmit_list is empty");
return;
}
if(use_mcast_xmit)
dest=null;
if(stats) {
xmit_rsps_sent+=xmit_list.size();
updateStats(sent, dest, 0, 1, 0);
}
try {
buf=Util.msgListToByteBuffer(xmit_list);
Message msg=new Message(dest, null, buf.getBuf(), buf.getOffset(), buf.getLength());
// changed Bela Jan 4 2007: we should use OOB for retransmitted messages, otherwise we tax the OOB thread pool
// too much
// msg.setFlag(Message.OOB);
msg.putHeader(name, new NakAckHeader(NakAckHeader.XMIT_RSP, first_seqno, last_seqno));
passDown(new Event(Event.MSG, msg));
}
catch(IOException ex) {
log.error("failed marshalling xmit list", ex);
}
}
private void handleXmitRsp(Message msg) {
LinkedList list;
Message m;
if(msg == null) {
if(warn)
log.warn("message is null");
return;
}
try {
list=Util.byteBufferToMessageList(msg.getRawBuffer(), msg.getOffset(), msg.getLength());
if(list != null) {
if(stats) {
xmit_rsps_received+=list.size();
updateStats(received, msg.getSrc(), 0, 1, 0);
}
for(Iterator it=list.iterator(); it.hasNext();) {
m=(Message)it.next();
up(new Event(Event.MSG, m));
}
list.clear();
}
}
catch(Exception ex) {
if(log.isErrorEnabled()) {
log.error("failed reading list of retransmitted messages", ex);
}
}
}
/**
* Remove old members from NakReceiverWindows and add new members (starting seqno=0). Essentially removes all
* entries from received_msgs that are not in <code>members</code>
*/
private void adjustReceivers(boolean remove) {
Address sender;
NakReceiverWindow win;
synchronized(received_msgs) {
if(remove) {
// 1. Remove all senders in received_msgs that are not members anymore
for(Iterator it=received_msgs.keySet().iterator(); it.hasNext();) {
sender=(Address)it.next();
if(!members.contains(sender)) {
win=(NakReceiverWindow)received_msgs.get(sender);
win.reset();
if(log.isDebugEnabled()) {
log.debug("removing " + sender + " from received_msgs (not member anymore)");
}
it.remove();
}
}
}
// 2. Add newly joined members to received_msgs (starting seqno=0)
for(int i=0; i < members.size(); i++) {
sender=(Address)members.elementAt(i);
if(!received_msgs.containsKey(sender)) {
win=createNakReceiverWindow(sender, 0);
received_msgs.put(sender, win);
}
}
}
}
/**
* Returns a message digest: for each member P the highest seqno received from P is added to the digest.
*/
private Digest getDigest() {
Address sender;
Range range;
Map<Address,Digest.Entry> map=new HashMap(members.size());
for(int i=0; i < members.size(); i++) {
sender=(Address)members.elementAt(i);
range=getLowestAndHighestSeqno(sender, false); // get the highest received seqno
if(range == null) {
if(log.isErrorEnabled()) {
log.error("range is null");
}
continue;
}
map.put(sender, new Digest.Entry(range.low, range.high));
}
return new Digest(map);
}
/**
* Returns a message digest: for each member P the highest seqno received from P <em>without a gap</em> is added to
* the digest. E.g. if the seqnos received from P are [+3 +4 +5 -6 +7 +8], then 5 will be returned. Also, the
* highest seqno <em>seen</em> is added. The max of all highest seqnos seen will be used (in STABLE) to determine
* whether the last seqno from a sender was received (see "Last Message Dropped" topic in DESIGN).
*/
private Digest getDigestHighestDeliveredMsgs() {
Address sender;
Range range;
long high_seqno_seen;
Map<Address,Digest.Entry> map=new HashMap(members.size());
for(int i=0; i < members.size(); i++) {
sender=(Address)members.elementAt(i);
range=getLowestAndHighestSeqno(sender, true); // get the highest deliverable seqno
if(range == null) {
if(log.isErrorEnabled()) {
log.error("range is null");
}
continue;
}
high_seqno_seen=getHighSeqnoSeen(sender);
map.put(sender, new Digest.Entry(range.low, range.high, high_seqno_seen));
}
return new Digest(map);
}
/**
* Creates a NakReceiverWindow for each sender in the digest according to the sender's seqno. If NRW already exists,
* reset it.
*/
private void setDigest(Digest digest) {
if(digest == null) {
if(log.isErrorEnabled()) {
log.error("digest or digest.senders is null");
}
return;
}
clear();
Map.Entry entry;
Address sender;
org.jgroups.protocols.pbcast.Digest.Entry val;
long initial_seqno;
NakReceiverWindow win;
for(Iterator it=digest.getSenders().entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
sender=(Address)entry.getKey();
val=(org.jgroups.protocols.pbcast.Digest.Entry)entry.getValue();
if(sender == null || val == null) {
if(warn) {
log.warn("sender or value is null");
}
continue;
}
initial_seqno=val.getHigh();
win=createNakReceiverWindow(sender, initial_seqno);
synchronized(received_msgs) {
received_msgs.put(sender, win);
}
}
}
/**
* For all members of the digest, adjust the NakReceiverWindows in the received_msgs hashtable. If the member
* already exists, sets its seqno to be the max of the seqno and the seqno of the member in the digest. If no entry
* exists, create one with the initial seqno set to the seqno of the member in the digest.
*/
private void mergeDigest(Digest digest) {
if(digest == null) {
if(log.isErrorEnabled()) {
log.error("digest or digest.senders is null");
}
return;
}
Map.Entry entry;
Address sender;
org.jgroups.protocols.pbcast.Digest.Entry val;
NakReceiverWindow win;
long initial_seqno;
for(Iterator it=digest.getSenders().entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
sender=(Address)entry.getKey();
val=(org.jgroups.protocols.pbcast.Digest.Entry)entry.getValue();
if(sender == null || val == null) {
if(warn) {
log.warn("sender or value is null");
}
continue;
}
initial_seqno=val.getHigh();
synchronized(received_msgs) {
win=(NakReceiverWindow)received_msgs.get(sender);
if(win == null) {
win=createNakReceiverWindow(sender, initial_seqno);
received_msgs.put(sender, win);
}
else {
if(win.getHighestReceived() < initial_seqno) {
win.reset();
received_msgs.remove(sender);
win=createNakReceiverWindow(sender, initial_seqno);
received_msgs.put(sender, win);
}
}
}
}
}
private NakReceiverWindow createNakReceiverWindow(Address sender, long initial_seqno) {
NakReceiverWindow win=new NakReceiverWindow(sender, this, initial_seqno, timer);
win.setRetransmitTimeouts(retransmit_timeout);
win.setDiscardDeliveredMessages(discard_delivered_msgs);
win.setMaxXmitBufSize(this.max_xmit_buf_size);
if(stats)
win.setListener(this);
return win;
}
/**
* Returns the lowest seqno still in cache (so it can be retransmitted) and the highest seqno received so far.
*
* @param sender The address for which the highest and lowest seqnos are to be retrieved
* @param stop_at_gaps If true, the highest seqno *deliverable* will be returned. If false, the highest seqno
* *received* will be returned. E.g. for [+3 +4 +5 -6 +7 +8], the highest_seqno_received is 8,
* whereas the higheset_seqno_seen (deliverable) is 5.
*/
private Range getLowestAndHighestSeqno(Address sender, boolean stop_at_gaps) {
Range r=null;
NakReceiverWindow win;
if(sender == null) {
if(log.isErrorEnabled()) {
log.error("sender is null");
}
return r;
}
synchronized(received_msgs) {
win=(NakReceiverWindow)received_msgs.get(sender);
}
if(win == null) {
if(log.isErrorEnabled()) {
log.error("sender " + sender + " not found in received_msgs");
}
return r;
}
if(stop_at_gaps) {
r=new Range(win.getLowestSeen(), win.getHighestSeen()); // deliverable messages (no gaps)
}
else {
r=new Range(win.getLowestSeen(), win.getHighestReceived() + 1); // received messages
}
return r;
}
/**
* Returns the highest seqno seen from sender. E.g. if we received 1, 2, 4, 5 from P, then 5 will be returned
* (doesn't take gaps into account). If we are the sender, we will return the highest seqno <em>sent</em> rather
* then <em>received</em>
*/
private long getHighSeqnoSeen(Address sender) {
NakReceiverWindow win;
long ret=0;
if(sender == null) {
if(log.isErrorEnabled()) {
log.error("sender is null");
}
return ret;
}
if(sender.equals(local_addr)) {
return seqno - 1;
}
synchronized(received_msgs) {
win=(NakReceiverWindow)received_msgs.get(sender);
}
if(win == null) {
if(log.isErrorEnabled()) {
log.error("sender " + sender + " not found in received_msgs");
}
return ret;
}
ret=win.getHighestReceived();
return ret;
}
/**
* Garbage collect messages that have been seen by all members. Update sent_msgs: for the sender P in the digest
* which is equal to the local address, garbage collect all messages <= seqno at digest[P]. Update received_msgs:
* for each sender P in the digest and its highest seqno seen SEQ, garbage collect all delivered_msgs in the
* NakReceiverWindow corresponding to P which are <= seqno at digest[P].
*/
private void stable(Digest d) {
NakReceiverWindow recv_win;
long my_highest_rcvd; // highest seqno received in my digest for a sender P
long stability_highest_rcvd; // highest seqno received in the stability vector for a sender P
if(members == null || local_addr == null || d == null) {
if(warn)
log.warn("members, local_addr or digest are null !");
return;
}
if(trace) {
log.trace("received stable digest " + d);
}
Map.Entry entry;
Address sender;
org.jgroups.protocols.pbcast.Digest.Entry val;
long high_seqno_delivered, high_seqno_received;
for(Iterator it=d.getSenders().entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
sender=(Address)entry.getKey();
if(sender == null)
continue;
val=(org.jgroups.protocols.pbcast.Digest.Entry)entry.getValue();
high_seqno_delivered=val.getHigh();
high_seqno_received=val.getHighSeen();
// check whether the last seqno received for a sender P in the stability vector is > last seqno
// received for P in my digest. if yes, request retransmission (see "Last Message Dropped" topic
// in DESIGN)
synchronized(received_msgs) {
recv_win=(NakReceiverWindow)received_msgs.get(sender);
}
if(recv_win != null) {
my_highest_rcvd=recv_win.getHighestReceived();
stability_highest_rcvd=high_seqno_received;
if(stability_highest_rcvd >= 0 && stability_highest_rcvd > my_highest_rcvd) {
if(trace) {
log.trace("my_highest_rcvd (" + my_highest_rcvd + ") < stability_highest_rcvd (" +
stability_highest_rcvd + "): requesting retransmission of " +
sender + '#' + stability_highest_rcvd);
}
retransmit(stability_highest_rcvd, stability_highest_rcvd, sender);
}
}
high_seqno_delivered-=gc_lag;
if(high_seqno_delivered < 0) {
continue;
}
if(trace)
log.trace("deleting msgs <= " + high_seqno_delivered + " from " + sender);
// garbage collect from sent_msgs if sender was myself
if(sender.equals(local_addr)) {
synchronized(sent_msgs) {
// gets us a subset from [lowest seqno - seqno]
SortedMap stable_keys=sent_msgs.headMap(new Long(high_seqno_delivered));
if(stable_keys != null) {
stable_keys.clear(); // this will modify sent_msgs directly
}
}
}
// delete *delivered* msgs that are stable
// recv_win=(NakReceiverWindow)received_msgs.get(sender);
if(recv_win != null)
recv_win.stable(high_seqno_delivered); // delete all messages with seqnos <= seqno
}
}
/**
* Implementation of Retransmitter.RetransmitCommand. Called by retransmission thread when gap is detected.
*/
public void retransmit(long first_seqno, long last_seqno, Address sender) {
NakAckHeader hdr;
Message retransmit_msg;
Address dest=sender; // to whom do we send the XMIT request ?
if(xmit_from_random_member && !local_addr.equals(sender)) {
Address random_member=(Address)Util.pickRandomElement(members);
if(random_member != null && !local_addr.equals(random_member)) {
dest=random_member;
if(trace)
log.trace("picked random member " + dest + " to send XMIT request to");
}
}
hdr=new NakAckHeader(NakAckHeader.XMIT_REQ, first_seqno, last_seqno, sender);
retransmit_msg=new Message(dest, null, null);
retransmit_msg.setFlag(Message.OOB);
if(trace)
log.trace(local_addr + ": sending XMIT_REQ ([" + first_seqno + ", " + last_seqno + "]) to " + dest);
retransmit_msg.putHeader(name, hdr);
passDown(new Event(Event.MSG, retransmit_msg));
if(stats) {
xmit_reqs_sent+=last_seqno - first_seqno +1;
updateStats(sent, dest, 1, 0, 0);
for(long i=first_seqno; i <= last_seqno; i++) {
XmitRequest req=new XmitRequest(sender, i, dest);
send_history.add(req);
}
}
}
/** Callback from the receiver window: a previously missing message was finally received. */
public void missingMessageReceived(long seqno, Message msg) {
    if(!stats)
        return;
    missing_msgs_received++;
    updateStats(received, msg.getSrc(), 0, 0, 1);
    receive_history.add(new MissingMessage(msg.getSrc(), seqno));
}
/**
 * Resets every receiver window and empties received_msgs. Our own sent messages are
 * deliberately NOT cleared (SourceForge bug #938584): messages sent between a join()
 * and a getState() must stay available for retransmission; STABLE garbage-collects them.
 */
private void clear() {
    synchronized(received_msgs) {
        for(Iterator it=received_msgs.values().iterator(); it.hasNext();) {
            NakReceiverWindow window=(NakReceiverWindow)it.next();
            window.reset();
        }
        received_msgs.clear();
    }
}
/** Full reset: clears sent_msgs, rewinds the seqno and destroys all receiver windows. */
private void reset() {
    synchronized(sent_msgs) {
        sent_msgs.clear();
        seqno=-1;
    }
    synchronized(received_msgs) {
        for(Iterator it=received_msgs.values().iterator(); it.hasNext();) {
            NakReceiverWindow window=(NakReceiverWindow)it.next();
            window.destroy();
        }
        received_msgs.clear();
    }
}
/** Dumps the sent-message summary plus one line per sender's receiver window. */
public String printMessages() {
    StringBuilder sb=new StringBuilder();
    sb.append("\nsent_msgs: ").append(printSentMsgs());
    sb.append("\nreceived_msgs:\n");
    synchronized(received_msgs) {
        for(Iterator it=received_msgs.entrySet().iterator(); it.hasNext();) {
            Map.Entry e=(Map.Entry)it.next();
            sb.append(e.getKey()).append(": ").append(e.getValue().toString()).append('\n');
        }
    }
    return sb.toString();
}
/**
 * Returns a summary of sent_msgs in the form "[min - max] (size)".
 * Fix: the original read {@code sent_msgs.size()} OUTSIDE the synchronized block while
 * firstKey/lastKey were read inside it, so a concurrent sender could make the reported
 * size inconsistent with the reported range (sent_msgs is not a thread-safe map).
 * All three reads now happen under the same lock for a consistent snapshot.
 */
public String printSentMsgs() {
    Long min_seqno, max_seqno;
    int size;
    synchronized(sent_msgs) {
        min_seqno=!sent_msgs.isEmpty()? (Long)sent_msgs.firstKey() : new Long(0);
        max_seqno=!sent_msgs.isEmpty()? (Long)sent_msgs.lastKey() : new Long(0);
        size=sent_msgs.size();
    }
    StringBuilder sb=new StringBuilder();
    sb.append('[').append(min_seqno).append(" - ").append(max_seqno).append("] (").append(size).append(")");
    return sb.toString();
}
/** Applies a CONFIG event: adopts the transport's "frag_size" as max_xmit_size, if present. */
private void handleConfigEvent(HashMap map) {
    if(map == null)
        return;
    if(!map.containsKey("frag_size"))
        return;
    max_xmit_size=((Integer)map.get("frag_size")).intValue();
    if(log.isInfoEnabled())
        log.info("max_xmit_size=" + max_xmit_size);
}
/** Per-member counters: retransmit requests, responses and missing messages received. */
static class Entry {
    long xmit_reqs, xmit_rsps, missing_msgs_rcvd;

    public String toString() {
        return xmit_reqs + " xmit_reqs" + ", " + xmit_rsps + " xmit_rsps" +
                ", " + missing_msgs_rcvd + " missing msgs";
    }
}
/** Records one XMIT_REQ we sent: original sender, seqno, request destination and timestamp. */
static class XmitRequest {
    Address original_sender; // original sender of message
    long seq, timestamp=System.currentTimeMillis();
    Address xmit_dest; // destination to which XMIT_REQ is sent, usually the original sender

    XmitRequest(Address original_sender, long seqno, Address xmit_dest) {
        this.original_sender=original_sender;
        this.xmit_dest=xmit_dest;
        this.seq=seqno;
    }

    public String toString() {
        return new Date(timestamp) + ": " + original_sender + " #" + seq +
                " (XMIT_REQ sent to " + xmit_dest + ")";
    }
}
/** Records a missing message that eventually arrived: sender, seqno and time of receipt. */
static class MissingMessage {
    Address original_sender;
    long seq, timestamp=System.currentTimeMillis();

    MissingMessage(Address original_sender, long seqno) {
        this.original_sender=original_sender;
        this.seq=seqno;
    }

    public String toString() {
        return new Date(timestamp) + ": " + original_sender + " #" + seq;
    }
}
}
|
package org.jgroups.protocols.pbcast;
import org.jgroups.*;
import org.jgroups.protocols.TP;
import org.jgroups.annotations.*;
import org.jgroups.conf.PropertyConverters;
import org.jgroups.stack.*;
import org.jgroups.util.*;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Negative AcKnowledgement layer (NAKs). Messages are assigned a monotonically
* increasing sequence number (seqno). Receivers deliver messages ordered
* according to seqno and request retransmission of missing messages.<br/>
* Retransmit requests are usually sent to the original sender of a message, but
* this can be changed by xmit_from_random_member (send to random member) or
* use_mcast_xmit_req (send to everyone). Responses can also be sent to everyone
* instead of the requester by setting use_mcast_xmit to true.
*
* @author Bela Ban
*/
@MBean(description="Reliable transmission multipoint FIFO protocol")
@DeprecatedProperty(names={"max_xmit_size", "eager_lock_release", "stats_list_size", "max_xmit_buf_size"})
public class NAKACK extends Protocol implements Retransmitter.RetransmitCommand, NakReceiverWindow.Listener, TP.ProbeHandler {
/** the weight with which we take the previous smoothed average into account, WEIGHT should be >0 and <= 1 */
private static final double WEIGHT=0.9;
private static final double INITIAL_SMOOTHED_AVG=30.0;
private static final int NUM_REBROADCAST_MSGS=3;
@Property(name="retransmit_timeout", converter=PropertyConverters.LongArray.class, description="Timeout before requesting retransmissions. Default is 600, 1200, 2400, 4800")
private long[] retransmit_timeouts= { 600, 1200, 2400, 4800 }; // time(s) to wait before requesting retransmission
@Property(description="If true, retransmissions stats will be captured. Default is false")
boolean enable_xmit_time_stats=false;
@Property(description="Garbage collection lag")
private int gc_lag=20; // number of msgs garbage collection lags behind
@Property(description="Max number of messages to be removed from a NakReceiverWindow. This property might " +
"get removed anytime, so don't use it !")
private int max_msg_batch_size=20000;
/**
* Retransmit messages using multicast rather than unicast. This has the advantage that, if many receivers
* lost a message, the sender only retransmits once
*/
@Property(description="Retransmit messages using multicast rather than unicast")
private boolean use_mcast_xmit=true;
/**
* Use a multicast to request retransmission of missing messages. This may
* be costly as every member in the cluster will send a response
*/
@Property(description="Use a multicast to request retransmission of missing messages. Default is false")
private boolean use_mcast_xmit_req=false;
/**
* Ask a random member for retransmission of a missing message. If set to
* true, discard_delivered_msgs will be set to false
*/
@Property(description="Ask a random member for retransmission of a missing message. Default is false")
private boolean xmit_from_random_member=false;
/**
* The first value (in milliseconds) to use in the exponential backoff
* retransmission mechanism. Only enabled if the value is > 0
*/
@Property(description="The first value (in milliseconds) to use in the exponential backoff. Enabled if greater than 0. Default is 0")
private long exponential_backoff=0;
/**
* If enabled, we use statistics gathered from actual retransmission times
* to compute the new retransmission times
*/
@Property(description="Use statistics gathered from actual retransmission times to compute new retransmission times. Default is false")
private boolean use_stats_for_retransmission=false;
@Property(description="Whether to use the old retransmitter which retransmits individual messages or the new one " +
"which uses ranges of retransmitted messages. Default is true. Note that this property will be removed in 3.0; " +
"it is only used to switch back to the old (and proven) retransmitter mechanism if issues occur")
@Deprecated
private boolean use_range_based_retransmitter=true;
/**
* Messages that have been received in order are sent up the stack (=
* delivered to the application). Delivered messages are removed from
* NakReceiverWindow.xmit_table and moved to
* NakReceiverWindow.delivered_msgs, where they are later garbage collected
* (by STABLE). Since we do retransmits only from sent messages, never
* received or delivered messages, we can turn the moving to delivered_msgs
* off, so we don't keep the message around, and don't need to wait for
* garbage collection to remove them.
*/
@Property(description="Should messages delivered to application be discarded")
private boolean discard_delivered_msgs=false;
@Property(description="Size of retransmission history. Default is 50 entries")
private int xmit_history_max_size=50;
@Property(description="Timeout to rebroadcast messages. Default is 2000 msec")
private long max_rebroadcast_timeout=2000;
/**
* When not finding a message on an XMIT request, include the last N
* stability messages in the error message
*/
@Property(description="Should stability history be printed if we fail in retransmission. Default is false")
protected boolean print_stability_history_on_failed_xmit=false;
/** If true, logs messages discarded because received from other members */
@Property(description="discards warnings about promiscuous traffic")
private boolean log_discard_msgs=true;
@Property(description="If true, trashes warnings about retransmission messages not found in the xmit_table (used for testing)")
private boolean log_not_found_msgs=true;
@ManagedAttribute(description="Number of retransmit requests received")
private long xmit_reqs_received;
@ManagedAttribute(description="Number of retransmit requests sent")
private long xmit_reqs_sent;
@ManagedAttribute(description="Number of retransmit responses received")
private long xmit_rsps_received;
@ManagedAttribute(description="Number of retransmit responses sent")
private long xmit_rsps_sent;
@ManagedAttribute(description="Number of missing messages received")
private long missing_msgs_received;
/**
* Maintains retransmission related data across a time. Only used if enable_xmit_time_stats is set to true.
* At program termination, accumulated data is dumped to a file named by the address of the member.
* Careful, don't enable this in production as the data in this hashmap are
* never reaped ! Really only meant for diagnostics !
*/
private ConcurrentMap<Long,XmitTimeStat> xmit_time_stats=null;
private long xmit_time_stats_start;
/**
* Captures stats on XMIT_REQS, XMIT_RSPS per sender
*/
private ConcurrentMap<Address, StatsEntry> sent=Util.createConcurrentMap();
/**
* Captures stats on XMIT_REQS, XMIT_RSPS per receiver
*/
private ConcurrentMap<Address, StatsEntry> received=Util.createConcurrentMap();
/**
* Per-sender map of seqnos and timestamps, to keep track of avg times for retransmission of messages
*/
private final ConcurrentMap<Address, ConcurrentMap<Long, Long>> xmit_stats=Util.createConcurrentMap();
/**
* Maintains a list of the last N retransmission times (duration it took to retransmit a message) for all members
*/
private final ConcurrentMap<Address, BoundedList<Long>> xmit_times_history=Util.createConcurrentMap();
/**
* Maintains a smoothed average of the retransmission times per sender,
* these are the actual values that are used for new retransmission requests
*/
private final Map<Address,Double> smoothed_avg_xmit_times=new HashMap<Address,Double>();
/** Keeps the last 50 retransmit requests */
private final BoundedList<String> xmit_history=new BoundedList<String>(50);
private boolean is_server=false;
private Address local_addr=null;
private final List<Address> members=new CopyOnWriteArrayList<Address>();
private View view;
@GuardedBy("seqno_lock")
private long seqno=0; // current message sequence number (starts with 1)
private final Lock seqno_lock=new ReentrantLock();
/** Map to store sent and received messages (keyed by sender) */
private final ConcurrentMap<Address,NakReceiverWindow> xmit_table=Util.createConcurrentMap();
private volatile boolean leaving=false;
private volatile boolean running=false;
private TimeScheduler timer=null;
private final Lock rebroadcast_lock=new ReentrantLock();
private final Condition rebroadcast_done=rebroadcast_lock.newCondition();
// set during processing of a rebroadcast event
private volatile boolean rebroadcasting=false;
private final Lock rebroadcast_digest_lock=new ReentrantLock();
@GuardedBy("rebroadcast_digest_lock")
private Digest rebroadcast_digest=null;
/** BoundedList<Digest>, keeps the last 10 stability messages */
protected final BoundedList<Digest> stability_msgs=new BoundedList<Digest>(10);
/** Keeps a bounded list of the last N digest sets */
protected final BoundedList<String> digest_history=new BoundedList<String>(10);
/** Default constructor; configuration is injected via the @Property-annotated fields. */
public NAKACK() {
}

/** @deprecated undelivered-message tracking was removed; always returns 0. */
@Deprecated
public static int getUndeliveredMessages() {
    return 0;
}
/** Number of XMIT_REQs received by this member. */
public long getXmitRequestsReceived() {return xmit_reqs_received;}
/** Number of XMIT_REQs sent by this member. */
public long getXmitRequestsSent() {return xmit_reqs_sent;}
/** Number of XMIT_RSPs received by this member. */
public long getXmitResponsesReceived() {return xmit_rsps_received;}
/** Number of XMIT_RSPs sent by this member. */
public long getXmitResponsesSent() {return xmit_rsps_sent;}
/** Number of formerly-missing messages that were eventually received. */
public long getMissingMessagesReceived() {return missing_msgs_received;}
/** Sums the pending retransmission requests over all receiver windows. */
@ManagedAttribute(description="Total number of missing messages")
public int getPendingRetransmissionRequests() {
    int total=0;
    for(NakReceiverWindow win: xmit_table.values())
        total+=win.getPendingXmits();
    return total;
}
/** Total number of messages currently held across all receiver windows. */
@ManagedAttribute
public int getXmitTableSize() {
    int total=0;
    for(NakReceiverWindow win: xmit_table.values())
        total+=win.size();
    return total;
}
/** Highest seqno assigned by this member so far (send() hands out seqno+1 per message). */
@ManagedAttribute
public long getCurrentSeqno() {return seqno;}
/** One line per sender: the retransmit statistics of its receiver window. */
@ManagedOperation
public String printRetransmitStats() {
    StringBuilder sb=new StringBuilder();
    for(Map.Entry<Address,NakReceiverWindow> e: xmit_table.entrySet()) {
        sb.append(e.getKey()).append(": ").append(e.getValue().printRetransmitStats()).append("\n");
    }
    return sb.toString();
}
/** Alias for {@link #getPendingRetransmissionRequests()}. */
public int getReceivedTableSize() {
    return getPendingRetransmissionRequests();
}

/**
 * Please don't use this method; it is only provided for unit testing !
 * @param mbr member whose receiver window is requested
 * @return the window for mbr, or null if none exists
 */
public NakReceiverWindow getWindow(Address mbr) {
    return xmit_table.get(mbr);
}
/** Zeroes all retransmission counters and clears the captured stats and histories. */
public void resetStats() {
    xmit_reqs_received=0;
    xmit_reqs_sent=0;
    xmit_rsps_received=0;
    xmit_rsps_sent=0;
    missing_msgs_received=0;
    sent.clear();
    received.clear();
    stability_msgs.clear();
    digest_history.clear();
    xmit_history.clear();
}
/**
 * Validates and reconciles the configuration: sets up the optional xmit-time stats map,
 * forces discard_delivered_msgs off when xmit_from_random_member is on, and disables the
 * mcast-based options when the transport does not support IP multicasting.
 */
public void init() throws Exception {
    if(enable_xmit_time_stats) {
        if(log.isWarnEnabled())
            log.warn("enable_xmit_time_stats is experimental, and may be removed in any release");
        xmit_time_stats=new ConcurrentHashMap<Long,XmitTimeStat>();
        xmit_time_stats_start=System.currentTimeMillis();
    }
    // random-member retransmission needs delivered msgs kept around for serving XMIT_REQs
    if(xmit_from_random_member && discard_delivered_msgs) {
        discard_delivered_msgs=false;
        log.warn("xmit_from_random_member set to true: changed discard_delivered_msgs to false");
    }
    TP transport=getTransport();
    if(transport == null)
        return;
    transport.registerProbeHandler(this);
    if(transport.supportsMulticasting())
        return;
    if(use_mcast_xmit) {
        log.warn("use_mcast_xmit should not be used because the transport (" + transport.getName() +
                ") does not support IP multicasting; setting use_mcast_xmit to false");
        use_mcast_xmit=false;
    }
    if(use_mcast_xmit_req) {
        log.warn("use_mcast_xmit_req should not be used because the transport (" + transport.getName() +
                ") does not support IP multicasting; setting use_mcast_xmit_req to false");
        use_mcast_xmit_req=false;
    }
}
/** Number of messages garbage collection lags behind (see gc_lag). */
public int getGcLag() {
    return gc_lag;
}

public void setGcLag(int gc_lag) {
    this.gc_lag=gc_lag;
}

public boolean isUseMcastXmit() {
    return use_mcast_xmit;
}

public void setUseMcastXmit(boolean use_mcast_xmit) {
    this.use_mcast_xmit=use_mcast_xmit;
}

public boolean isXmitFromRandomMember() {
    return xmit_from_random_member;
}

public void setXmitFromRandomMember(boolean xmit_from_random_member) {
    this.xmit_from_random_member=xmit_from_random_member;
}

public boolean isDiscardDeliveredMsgs() {
    return discard_delivered_msgs;
}

/**
 * Changes discard_delivered_msgs and, only when the value actually changed,
 * propagates the new setting to every existing receiver window.
 */
public void setDiscardDeliveredMsgs(boolean discard_delivered_msgs) {
    boolean old=this.discard_delivered_msgs;
    this.discard_delivered_msgs=discard_delivered_msgs;
    if(old != this.discard_delivered_msgs) {
        for(NakReceiverWindow win: xmit_table.values()) {
            win.setDiscardDeliveredMessages(this.discard_delivered_msgs);
        }
    }
}
/** @deprecated the xmit buffer size limit was removed; always returns 0. */
@Deprecated
public int getMaxXmitBufSize() {
    return 0;
}

/** @deprecated no-op; the xmit buffer size limit was removed. */
@Deprecated
public void setMaxXmitBufSize(int max_xmit_buf_size) {
    ;
}

/**
 * @return always -1
 * @deprecated removed in 2.6
 */
public static long getMaxXmitSize() {
    return -1;
}

/**
 * No-op.
 * @param max_xmit_size ignored
 * @deprecated removed in 2.6
 */
public void setMaxXmitSize(long max_xmit_size) {
}

/** Enables/disables warnings about messages dropped because the sender has no window. */
public void setLogDiscardMessages(boolean flag) {
    log_discard_msgs=flag;
}

/** Alias for {@link #setLogDiscardMessages(boolean)}. */
public void setLogDiscardMsgs(boolean flag) {
    setLogDiscardMessages(flag);
}

public boolean getLogDiscardMessages() {
    return log_discard_msgs;
}
/** Extends the superclass stats with a dump of all sent/received messages under key "msgs". */
public Map<String,Object> dumpStats() {
    Map<String,Object> retval=super.dumpStats();
    retval.put("msgs", printMessages());
    return retval;
}
/**
 * Dumps the per-destination XMIT_REQ/XMIT_RSP counters (sent and received) plus the
 * stability messages received. The original contained two byte-identical iterator
 * loops; they are factored into a single helper and use for-each instead of Iterator.
 */
public String printStats() {
    StringBuilder sb=new StringBuilder();
    sb.append("sent:\n");
    appendStatsEntries(sb, sent);
    sb.append("\nreceived:\n");
    appendStatsEntries(sb, received);
    sb.append("\nStability messages received\n");
    sb.append(printStabilityMessages()).append("\n");
    return sb.toString();
}

/** Appends one "key: value" line per entry; a null (or Global.NULL) key denotes the mcast destination. */
private static void appendStatsEntries(StringBuilder sb, Map<Address,StatsEntry> map) {
    for(Map.Entry<Address,StatsEntry> entry: map.entrySet()) {
        Object key=entry.getKey();
        if(key == null || key == Global.NULL) key="<mcast dest>";
        sb.append(key).append(": ").append(entry.getValue()).append("\n");
    }
}
/** Returns the last N stability digests, one per line. */
@ManagedOperation(description="TODO")
public String printStabilityMessages() {
    // String.valueOf renders null as "null", matching the original StringBuilder.append behavior
    return String.valueOf(Util.printListWithDelimiter(stability_msgs, "\n"));
}
/** Numbered list (starting at 1) of the last N stability digests received. */
public String printStabilityHistory() {
    StringBuilder sb=new StringBuilder();
    int index=1;
    for(Digest digest: stability_msgs)
        sb.append(index++).append(": ").append(digest).append("\n");
    return sb.toString();
}
/** Prints the recorded digest set/merge operations, prefixed by our address. */
@ManagedOperation(description="Keeps information about the last N times a digest was set or merged")
public String printDigestHistory() {
    StringBuilder sb=new StringBuilder(local_addr + ":\n");
    for(String entry: digest_history)
        sb.append(entry).append("\n");
    return sb.toString();
}
/** One line per sender: the loss rate reported by its receiver window. */
@ManagedOperation(description="TODO")
public String printLossRates() {
    StringBuilder sb=new StringBuilder();
    for(Map.Entry<Address,NakReceiverWindow> entry: xmit_table.entrySet())
        sb.append(entry.getKey()).append(": ").append(entry.getValue().printLossRate()).append("\n");
    return sb.toString();
}
/** One line per sender: size and capacity of its retransmit table. */
@ManagedOperation(description="Returns the sizes of all NakReceiverWindow.RetransmitTables")
public String printRetransmitTableSizes() {
    StringBuilder sb=new StringBuilder();
    for(Map.Entry<Address,NakReceiverWindow> entry: xmit_table.entrySet()) {
        NakReceiverWindow win=entry.getValue();
        // same output as before, but with a consistent append chain instead of mixed concatenation
        sb.append(entry.getKey()).append(": ").append(win.getRetransmiTableSize())
          .append(" (capacity=").append(win.getRetransmitTableCapacity()).append(")\n");
    }
    return sb.toString();
}
/** Arithmetic mean of the loss rates over all receiver windows; 0.0 when there are none. */
@ManagedAttribute
public double getAverageLossRate() {
    if(xmit_table.isEmpty())
        return 0.0;
    double sum=0.0;
    int count=0;
    for(NakReceiverWindow win: xmit_table.values()) {
        sum+=win.getLossRate();
        count++;
    }
    return sum / (double)count;
}
/** Arithmetic mean of the smoothed loss rates over all receiver windows; 0.0 when there are none. */
@ManagedAttribute
public double getAverageSmoothedLossRate() {
    if(xmit_table.isEmpty())
        return 0.0;
    double sum=0.0;
    int count=0;
    for(NakReceiverWindow win: xmit_table.values()) {
        sum+=win.getSmoothedLossRate();
        count++;
    }
    return sum / (double)count;
}
/**
 * Events this protocol services for layers above: digest retrieval and installation.
 * Fix: uses Integer.valueOf (cached boxing) instead of the deprecated-pattern
 * {@code new Integer(...)} constructor, and sizes the Vector to the 4 elements it holds.
 */
public Vector<Integer> providedUpServices() {
    Vector<Integer> retval=new Vector<Integer>(4);
    retval.add(Integer.valueOf(Event.GET_DIGEST));
    retval.add(Integer.valueOf(Event.SET_DIGEST));
    retval.add(Integer.valueOf(Event.OVERWRITE_DIGEST));
    retval.add(Integer.valueOf(Event.MERGE_DIGEST));
    return retval;
}
/**
 * Starts the protocol: grabs the transport's timer (required) and, if xmit-time stats
 * are enabled, registers a shutdown hook that dumps them to a per-member log file.
 * @throws Exception if the transport provides no timer
 */
public void start() throws Exception {
    timer=getTransport().getTimer();
    if(timer == null)
        throw new Exception("timer is null");
    running=true;
    leaving=false;
    if(xmit_time_stats != null) {
        // dump accumulated xmit-time stats at JVM shutdown (diagnostics only)
        Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                String filename="xmit-stats-" + local_addr + ".log";
                try {
                    dumpXmitStats(filename);
                }
                catch(IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }
}
/** Stops the protocol; subsequent sends are discarded and all windows are destroyed. */
public void stop() {
    running=false;
    reset(); // clears sent_msgs and destroys all NakReceiverWindows
}
/**
 * <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>down_prot.down()</code> in this
 * method as the event is passed down by default by the superclass after this method returns !</b>
 */
public Object down(Event evt) {
    switch(evt.getType()) {
        case Event.MSG:
            Message msg=(Message)evt.getArg();
            Address dest=msg.getDest();
            if(dest != null && !dest.isMulticastAddress() || msg.isFlagSet(Message.NO_RELIABILITY))
                break; // unicast address: not null and not mcast, pass down unchanged
            send(evt, msg); // multicast: assign seqno, store for retransmission, pass down
            return null; // don't pass down the stack
        case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
            stable((Digest)evt.getArg());
            return null; // do not pass down further (Bela Aug 7 2001)
        case Event.GET_DIGEST:
            return getDigest();
        case Event.SET_DIGEST:
            setDigest((Digest)evt.getArg());
            return null;
        case Event.OVERWRITE_DIGEST:
            overwriteDigest((Digest)evt.getArg());
            return null;
        case Event.MERGE_DIGEST:
            mergeDigest((Digest)evt.getArg());
            return null;
        case Event.TMP_VIEW:
            // provisional view: update the membership list only, keep all windows
            View tmp_view=(View)evt.getArg();
            Vector<Address> mbrs=tmp_view.getMembers();
            members.clear();
            members.addAll(mbrs);
            // adjustReceivers(false);
            break;
        case Event.VIEW_CHANGE:
            // note: tmp_view/mbrs are the locals declared in the TMP_VIEW case above
            tmp_view=(View)evt.getArg();
            mbrs=tmp_view.getMembers();
            members.clear();
            members.addAll(mbrs);
            view=tmp_view;
            adjustReceivers(members); // add/remove receiver windows to match the new view
            is_server=true; // check vids from now on
            // prune per-member state of departed members; null key stands for the mcast destination
            Set<Address> tmp=new LinkedHashSet<Address>(members);
            tmp.add(null); // for null destination (= mcast)
            sent.keySet().retainAll(tmp);
            received.keySet().retainAll(tmp);
            xmit_stats.keySet().retainAll(tmp);
            // in_progress.keySet().retainAll(mbrs); // remove elements which are not in the membership
            break;
        case Event.BECOME_SERVER:
            is_server=true;
            break;
        case Event.SET_LOCAL_ADDRESS:
            local_addr=(Address)evt.getArg();
            break;
        case Event.DISCONNECT:
            leaving=true;
            reset();
            break;
        case Event.REBROADCAST:
            // rebroadcast messages until our digest covers the requested one, then clear the marker
            rebroadcasting=true;
            rebroadcast_digest=(Digest)evt.getArg();
            try {
                rebroadcastMessages();
            }
            finally {
                rebroadcasting=false;
                rebroadcast_digest_lock.lock();
                try {
                    rebroadcast_digest=null;
                }
                finally {
                    rebroadcast_digest_lock.unlock();
                }
            }
            return null;
    }
    return down_prot.down(evt);
}
/**
 * <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>PassUp</code> in this
 * method as the event is passed up by default by the superclass after this method returns !</b>
 */
public Object up(Event evt) {
    switch(evt.getType()) {
        case Event.MSG:
            Message msg=(Message)evt.getArg();
            if(msg.isFlagSet(Message.NO_RELIABILITY))
                break;
            NakAckHeader hdr=(NakAckHeader)msg.getHeader(this.id);
            if(hdr == null)
                break; // pass up (e.g. unicast msg)
            if(!is_server) { // discard messages while not yet server (i.e., until JOIN has returned)
                if(log.isTraceEnabled())
                    log.trace("message " + msg.getSrc() + "::" + hdr.seqno + " was discarded (not yet server)");
                return null;
            }
            // Changed by bela Jan 29 2003: we must not remove the header, otherwise further xmit requests will fail !
            //hdr=(NakAckHeader)msg.removeHeader(getName());
            switch(hdr.type) {
                case NakAckHeader.MSG: // regular multicast payload
                    handleMessage(msg, hdr);
                    return null; // transmitter passes message up for us !
                case NakAckHeader.XMIT_REQ: // someone asks us to retransmit a range
                    if(hdr.range == null) {
                        if(log.isErrorEnabled()) {
                            log.error("XMIT_REQ: range of xmit msg is null; discarding request from " + msg.getSrc());
                        }
                        return null;
                    }
                    handleXmitReq(msg.getSrc(), hdr.range.low, hdr.range.high, hdr.sender);
                    return null;
                case NakAckHeader.XMIT_RSP: // a retransmitted (wrapped) message arrived
                    handleXmitRsp(msg);
                    return null;
                default:
                    if(log.isErrorEnabled()) {
                        log.error("NakAck header type " + hdr.type + " not known !");
                    }
                    return null;
            }
        case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
            stable((Digest)evt.getArg());
            return null; // do not pass up further (Bela Aug 7 2001)
        case Event.SUSPECT:
            // release the promise if rebroadcasting is in progress... otherwise we wait forever. there will be a new
            // flush round anyway
            if(rebroadcasting) {
                cancelRebroadcasting();
            }
            break;
    }
    return up_prot.up(evt);
}
/**
 * Assigns the next seqno to msg and passes it down the stack. Incrementing the seqno and
 * inserting the message into our own retransmit window happen atomically under seqno_lock,
 * so seqnos are gap-free and every sent message is retransmittable. The message is
 * discarded when the protocol is not running or no window exists for local_addr.
 * @param evt the MSG event to pass down (wraps msg)
 * @param msg the multicast message to number and send; must not be null
 */
private void send(Event evt, Message msg) {
    if(msg == null)
        throw new NullPointerException("msg is null; event is " + evt);
    if(!running) {
        if(log.isTraceEnabled())
            log.trace("[" + local_addr + "] discarded message as we're not in the 'running' state, message: " + msg);
        return;
    }
    long msg_id;
    NakReceiverWindow win=xmit_table.get(local_addr);
    if(win == null) { // discard message if there is no entry for local_addr
        if(log.isWarnEnabled() && log_discard_msgs)
            log.warn(local_addr + ": discarded message from " + local_addr + " with no window, my view is " + view);
        return;
    }
    if(msg.getSrc() == null)
        msg.setSrc(local_addr); // this needs to be done so we can check whether the message sender is the local_addr
    seqno_lock.lock();
    try {
        try { // incrementing seqno and adding the msg to sent_msgs needs to be atomic
            msg_id=seqno +1;
            msg.putHeader(this.id, NakAckHeader.createMessageHeader(msg_id));
            win.add(msg_id, msg);
            seqno=msg_id;
        }
        catch(Throwable t) {
            throw new RuntimeException("failure adding msg " + msg + " to the retransmit table for " + local_addr, t);
        }
    }
    finally {
        seqno_lock.unlock();
    }
    try {
        if(log.isTraceEnabled())
            log.trace("sending " + local_addr + "#" + msg_id);
        down_prot.down(evt); // if this fails, since msg is in sent_msgs, it can be retransmitted
    }
    catch(Throwable t) { // eat the exception, don't pass it up the stack
        if(log.isWarnEnabled()) {
            log.warn("failure passing message down", t);
        }
    }
}
/**
 * Finds the corresponding NakReceiverWindow and adds the message to it (according to seqno). Then removes as many
 * messages as possible from the NRW and passes them up the stack. Discards messages from non-members.
 */
private void handleMessage(Message msg, NakAckHeader hdr) {
    Address sender=msg.getSrc();
    if(sender == null) {
        if(log.isErrorEnabled())
            log.error("sender of message is null");
        return;
    }
    if(log.isTraceEnabled())
        log.trace(new StringBuilder().append(local_addr).append(": received ").append(sender).append('#').append(hdr.seqno));
    NakReceiverWindow win=xmit_table.get(sender);
    if(win == null) { // discard message if there is no entry for sender
        if(leaving)
            return;
        if(log.isWarnEnabled() && log_discard_msgs)
            log.warn(local_addr + ": dropped message from " + sender + " (not in table " + xmit_table.keySet() +"), view=" + view);
        return;
    }
    // our own messages were already added to the window in send(), so don't add them again
    boolean loopback=local_addr.equals(sender);
    boolean added=loopback || win.add(hdr.seqno, msg);
    // OOB messages may be delivered immediately (out of order), but at most once:
    // the OOB_DELIVERED transient flag guards against double delivery below
    if(added && msg.isFlagSet(Message.OOB)) {
        if(loopback)
            msg=win.get(hdr.seqno); // we *have* to get a message, because loopback means we didn't add it to win !
        if(msg != null && msg.isFlagSet(Message.OOB)) {
            if(msg.setTransientFlagIfAbsent(Message.OOB_DELIVERED))
                up_prot.up(new Event(Event.MSG, msg));
        }
    }
    // Efficient way of checking whether another thread is already processing messages from 'sender'.
    // If that's the case, we return immediately and let the existing thread process our message
    // can be returned to the thread pool
    final AtomicBoolean processing=win.getProcessing();
    if(!processing.compareAndSet(false, true)) {
        return;
    }
    boolean remove_msgs=discard_delivered_msgs && !loopback;
    boolean released_processing=false;
    try {
        while(true) {
            // we're removing a msg and set processing to false (if null) *atomically* (wrt to add())
            List<Message> msgs=win.removeMany(processing, remove_msgs, max_msg_batch_size);
            if(msgs == null || msgs.isEmpty()) {
                released_processing=true;
                return;
            }
            for(final Message msg_to_deliver: msgs) {
                // skip OOB messages already delivered via the fast path above
                if(msg_to_deliver.isFlagSet(Message.OOB) && !msg_to_deliver.setTransientFlagIfAbsent(Message.OOB_DELIVERED))
                    continue;
                //msg_to_deliver.removeHeader(getName()); // Changed by bela Jan 29 2003: not needed (see above)
                try {
                    up_prot.up(new Event(Event.MSG, msg_to_deliver));
                }
                catch(Throwable t) {
                    log.error("couldn't deliver message " + msg_to_deliver, t);
                }
            }
        }
    }
    finally {
        // processing is always set in win.remove(processing) above and never here ! This code is just a
        // 2nd line of defense should there be an exception before win.remove(processing) sets processing
        if(!released_processing)
            processing.set(false);
    }
}
/**
 * Retransmits messsages first_seqno to last_seqno from original_sender from xmit_table to xmit_requester,
 * called when XMIT_REQ is received.
 * @param xmit_requester The sender of the XMIT_REQ, we have to send the requested copy of the message to this address
 * @param first_seqno The first sequence number to be retransmitted (<= last_seqno)
 * @param last_seqno The last sequence number to be retransmitted (>= first_seqno)
 * @param original_sender The member who originally sent the messsage. Guaranteed to be non-null
 */
private void handleXmitReq(Address xmit_requester, long first_seqno, long last_seqno, Address original_sender) {
    Message msg;
    if(log.isTraceEnabled()) {
        StringBuilder sb=new StringBuilder();
        sb.append(local_addr).append(": received xmit request from ").append(xmit_requester).append(" for ");
        sb.append(original_sender).append(" [").append(first_seqno).append(" - ").append(last_seqno).append("]");
        log.trace(sb.toString());
    }
    // empty/inverted range: nothing to do
    if(first_seqno > last_seqno)
        return;
    if(stats) {
        xmit_reqs_received+=last_seqno - first_seqno +1;
        updateStats(received, xmit_requester, 1, 0, 0);
    }
    if(xmit_time_stats != null) {
        // bucket diagnostics by elapsed seconds since protocol start
        long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
        XmitTimeStat stat=xmit_time_stats.get(key);
        if(stat == null) {
            stat=new XmitTimeStat();
            XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
            if(stat2 != null)
                stat=stat2;
        }
        stat.xmit_reqs_received.addAndGet((int)(last_seqno - first_seqno +1));
        stat.xmit_rsps_sent.addAndGet((int)(last_seqno - first_seqno +1));
    }
    NakReceiverWindow win=xmit_table.get(original_sender);
    if(win == null) {
        if(log.isErrorEnabled()) {
            StringBuilder sb=new StringBuilder();
            sb.append("(requester=").append(xmit_requester).append(", local_addr=").append(this.local_addr);
            sb.append(") ").append(original_sender).append(" not found in retransmission table");
            // don't print the table unless we are in trace mode because it can be LARGE
            if (log.isTraceEnabled()) {
                sb.append(":\n").append(printMessages());
            }
            if(print_stability_history_on_failed_xmit) {
                sb.append(" (stability history:\n").append(printStabilityHistory());
            }
            log.error(sb.toString());
        }
        return;
    }
    // resend every message in the range that is still in the window; missing ones are skipped
    for(long i=first_seqno; i <= last_seqno; i++) {
        msg=win.get(i);
        if(msg == null) {
            if(log.isWarnEnabled() && log_not_found_msgs && !local_addr.equals(xmit_requester)) {
                StringBuilder sb=new StringBuilder();
                sb.append("(requester=").append(xmit_requester).append(", local_addr=").append(this.local_addr);
                sb.append(") message ").append(original_sender).append("::").append(i);
                sb.append(" not found in retransmission table of ").append(original_sender).append(":\n").append(win);
                if(print_stability_history_on_failed_xmit) {
                    sb.append(" (stability history:\n").append(printStabilityHistory());
                }
                log.warn(sb.toString());
            }
            continue;
        }
        sendXmitRsp(xmit_requester, msg);
    }
}
/** Ends an in-progress rebroadcast round and wakes up any thread waiting on rebroadcast_done. */
private void cancelRebroadcasting() {
    rebroadcast_lock.lock();
    try {
        rebroadcasting=false;
        rebroadcast_done.signalAll();
    }
    finally {
        rebroadcast_lock.unlock();
    }
}
/** Adds the given deltas to key's StatsEntry, creating it race-free via putIfAbsent. */
private static void updateStats(ConcurrentMap<Address,StatsEntry> map, Address key, int req, int rsp, int missing) {
    StatsEntry entry=map.get(key);
    if(entry == null) {
        StatsEntry created=new StatsEntry();
        StatsEntry existing=map.putIfAbsent(key, created);
        entry=existing != null? existing : created;
    }
    entry.xmit_reqs+=req;
    entry.xmit_rsps+=rsp;
    entry.missing_msgs_rcvd+=missing;
}
/**
 * Sends a message msg to the requester. We have to wrap the original message into a retransmit message, as we need
 * to preserve the original message's properties, such as src, headers etc.
 * @param dest the requester to send the wrapped copy to (replaced by null = mcast when use_mcast_xmit)
 * @param msg the original message to retransmit; must not be null
 */
private void sendXmitRsp(Address dest, Message msg) {
    Buffer buf;
    if(msg == null) {
        if(log.isErrorEnabled())
            log.error("message is null, cannot send retransmission");
        return;
    }
    if(stats) {
        xmit_rsps_sent++;
        updateStats(sent, dest, 0, 1, 0);
    }
    // multicast the response so one retransmission serves all members that lost the msg
    if(use_mcast_xmit)
        dest=null;
    if(msg.getSrc() == null)
        msg.setSrc(local_addr);
    try {
        // marshal the original message whole so src/headers survive the round trip
        buf=Util.messageToByteBuffer(msg);
        Message xmit_msg=new Message(dest, null, buf.getBuf(), buf.getOffset(), buf.getLength());
        // changed Bela Jan 4 2007: we should not use OOB for retransmitted messages, otherwise we tax the
        // OOB thread pool too much
        // xmit_msg.setFlag(Message.OOB);
        if(msg.isFlagSet(Message.OOB)) // set OOB for the wrapping message if the wrapped message is OOB, too
            xmit_msg.setFlag(Message.OOB);
        xmit_msg.putHeader(this.id, NakAckHeader.createXmitResponseHeader());
        down_prot.down(new Event(Event.MSG, xmit_msg));
    }
    catch(IOException ex) {
        log.error("failed marshalling xmit list", ex);
    }
}
/**
 * Handles an XMIT_RSP: unwraps the retransmitted message carried in msg's buffer, passes it up
 * the stack and updates retransmission stats. If a rebroadcast is in progress and our digest now
 * covers the rebroadcast digest, the rebroadcast is cancelled.
 * @param msg the wrapping XMIT_RSP message (created by sendXmitRsp()); logged and ignored if null
 */
private void handleXmitRsp(Message msg) {
    if(msg == null) {
        if(log.isWarnEnabled())
            log.warn("message is null");
        return;
    }

    try {
        // the original message was marshalled into msg's buffer by sendXmitRsp()
        Message wrapped_msg=Util.byteBufferToMessage(msg.getRawBuffer(), msg.getOffset(), msg.getLength());

        if(xmit_time_stats != null) {
            // stats are bucketed per second since collection started
            long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
            XmitTimeStat stat=xmit_time_stats.get(key);
            if(stat == null) {
                stat=new XmitTimeStat();
                XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
                if(stat2 != null)
                    stat=stat2;
            }
            stat.xmit_rsps_received.incrementAndGet();
        }

        if(stats) {
            xmit_rsps_received++;
            updateStats(received, msg.getSrc(), 0, 1, 0);
        }

        up(new Event(Event.MSG, wrapped_msg));

        if(rebroadcasting) {
            // cancel the rebroadcast once our digest has caught up with the rebroadcast digest
            Digest tmp=getDigest();
            boolean cancel_rebroadcasting;
            rebroadcast_digest_lock.lock();
            try {
                cancel_rebroadcasting=tmp.isGreaterThanOrEqual(rebroadcast_digest);
            }
            finally {
                rebroadcast_digest_lock.unlock();
            }
            if(cancel_rebroadcasting) {
                cancelRebroadcasting();
            }
        }
    }
    catch(Exception ex) {
        if(log.isErrorEnabled()) {
            log.error("failed reading retransmitted message", ex);
        }
    }
}
/**
 * Takes the digest we were asked to rebroadcast (rebroadcast_digest) and compares it to the
 * current digest. If the current digest has fewer messages, send retransmit requests for the
 * missing messages. Blocks (bounded by max_rebroadcast_timeout) until all missing messages have
 * been received, the digests converge, or the rebroadcast is cancelled. If we're waiting for a
 * missing message from P, and P crashes while waiting, we need to exclude P from the wait set.
 */
private void rebroadcastMessages() {
    Digest my_digest;
    Map<Address,Digest.Entry> their_digest;
    Address sender;
    Digest.Entry their_entry, my_entry;
    long their_high, my_high;
    long sleep=max_rebroadcast_timeout / NUM_REBROADCAST_MSGS;
    long wait_time=max_rebroadcast_timeout, start=System.currentTimeMillis();

    while(wait_time > 0) {
        rebroadcast_digest_lock.lock();
        try {
            if(rebroadcast_digest == null)
                break;
            their_digest=rebroadcast_digest.getSenders();
        }
        finally {
            rebroadcast_digest_lock.unlock();
        }
        my_digest=getDigest();

        boolean xmitted=false;
        for(Map.Entry<Address,Digest.Entry> entry: their_digest.entrySet()) {
            sender=entry.getKey();
            their_entry=entry.getValue();
            my_entry=my_digest.get(sender);
            if(my_entry == null)
                continue;
            their_high=their_entry.getHighest();

            // Cannot ask for 0 to be retransmitted because the first seqno in NAKACK and UNICAST(2) is always 1 !
            // Also, we need to ask for retransmission of my_high+1, because we already *have* my_high, and don't
            // need it, so the retransmission range is [my_high+1 .. their_high]: *exclude* my_high, but *include*
            // their_high
            my_high=Math.max(1, my_entry.getHighest() +1);
            if(their_high > my_high) {
                if(log.isTraceEnabled())
                    log.trace("[" + local_addr + "] fetching " + my_high + "-" + their_high + " from " + sender);
                retransmit(my_high, their_high, sender, true); // use multicast to send retransmit request
                xmitted=true;
            }
        }
        if(!xmitted)
            return; // we're done; no retransmissions are needed anymore. our digest is >= rebroadcast_digest

        rebroadcast_lock.lock();
        try {
            try {
                my_digest=getDigest();
                rebroadcast_digest_lock.lock();
                try {
                    if(!rebroadcasting || my_digest.isGreaterThanOrEqual(rebroadcast_digest))
                        return;
                }
                finally {
                    rebroadcast_digest_lock.unlock();
                }
                rebroadcast_done.await(sleep, TimeUnit.MILLISECONDS);
                // Fix: recompute the remaining budget from the start time. The previous code did
                // wait_time-=(now - start), subtracting the *total* elapsed time on every pass, which
                // shrinks the budget much faster than real time and cuts the rebroadcast phase short.
                wait_time=max_rebroadcast_timeout - (System.currentTimeMillis() - start);
            }
            catch(InterruptedException e) {
                // Fix: previously the interrupt was silently swallowed. Restore the interrupt status
                // so owning threads/pools can see it, and stop waiting.
                Thread.currentThread().interrupt();
                return;
            }
        }
        finally {
            rebroadcast_lock.unlock();
        }
    }
}
/**
 * Remove old members from NakReceiverWindows: prunes xmit_table down to the given membership.
 * Every window whose owner is not in <code>new_members</code> is removed and destroyed; the
 * local address is never removed. This method is not called concurrently multiple times.
 */
private void adjustReceivers(List<Address> new_members) {
    for(Address member: xmit_table.keySet()) {
        // keep current members, and never drop our own window
        if(new_members.contains(member) || (local_addr != null && local_addr.equals(member)))
            continue;
        NakReceiverWindow win=xmit_table.remove(member);
        win.destroy();
        if(log.isDebugEnabled())
            log.debug("removed " + member + " from xmit_table (not member anymore)");
    }
}
/**
 * Returns a message digest: for each member P the lowest, highest delivered and highest
 * received seqno (as reported by P's NakReceiverWindow) is added.
 */
public Digest getDigest() {
    final Map<Address,Digest.Entry> map=new HashMap<Address,Digest.Entry>();
    for(Map.Entry<Address,NakReceiverWindow> e: xmit_table.entrySet()) {
        // key and value are guaranteed to be non-null (CCHM)
        long[] seqnos=e.getValue().getDigest();
        map.put(e.getKey(), new Digest.Entry(seqnos[0], seqnos[1], seqnos[2]));
    }
    return new Digest(map);
}
/**
 * Creates a NakReceiverWindow for each sender in the digest according to the sender's seqno. If NRW already exists,
 * reset it. Delegates to setDigest(Digest, boolean) with merge=false.
 */
private void setDigest(Digest digest) {
    setDigest(digest, false); // merge=false: set (overwrite) rather than merge
}
/**
 * Merges the given digest with our own: existing windows are only reset if they are behind the
 * digest's seqno for that sender. Delegates to setDigest(Digest, boolean) with merge=true.
 */
private void mergeDigest(Digest digest) {
    setDigest(digest, true); // merge=true
}
/**
 * Overwrites existing entries, but does NOT remove entries not found in the digest: for each
 * sender in the digest, any existing NakReceiverWindow (except our own, which only gets its
 * highest-delivered seqno updated) is destroyed and replaced by a fresh window seeded from the
 * digest.
 * @param digest the digest to install; ignored if null
 */
private void overwriteDigest(Digest digest) {
    if(digest == null)
        return;

    StringBuilder sb=new StringBuilder("\n[overwriteDigest()]\n");
    sb.append("existing digest: ").append(getDigest()).append("\nnew digest: ").append(digest);

    for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) {
        Address sender=entry.getKey();
        Digest.Entry val=entry.getValue();
        if(sender == null || val == null)
            continue;

        long highest_delivered_seqno=val.getHighestDeliveredSeqno();
        long low_seqno=val.getLow();

        NakReceiverWindow win=xmit_table.get(sender);
        if(win != null) {
            // Fix: guard against a null local_addr, consistent with setDigest(Digest, boolean);
            // previously this comparison could throw an NPE if local_addr was not yet set
            if(local_addr != null && local_addr.equals(sender)) {
                win.setHighestDelivered(highest_delivered_seqno);
                continue; // don't destroy my own window
            }
            xmit_table.remove(sender);
            win.destroy(); // stops retransmission
        }
        win=createNakReceiverWindow(sender, highest_delivered_seqno, low_seqno);
        xmit_table.put(sender, win);
    }
    sb.append("\n").append("resulting digest: ").append(getDigest());
    digest_history.add(sb.toString());
    if(log.isDebugEnabled())
        log.debug(sb.toString());
}
/**
 * Sets or merges the digest. If there is no entry for a given member in xmit_table, create a new NakReceiverWindow.
 * Else skip the existing entry, unless it is a merge. In this case, skip the existing entry if its seqno is
 * greater than or equal to the one in the digest, or reset the window and create a new one if not.
 * @param digest The digest; ignored if null
 * @param merge Whether to merge the new digest with our own, or not
 */
private void setDigest(Digest digest, boolean merge) {
    if(digest == null)
        return;

    StringBuilder sb=new StringBuilder(merge? "\n[mergeDigest()]\n" : "\n[setDigest()]\n");
    sb.append("existing digest: " + getDigest()).append("\nnew digest: " + digest);

    boolean set_own_seqno=false;
    for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) {
        Address sender=entry.getKey();
        Digest.Entry val=entry.getValue();
        if(sender == null || val == null)
            continue;

        long highest_delivered_seqno=val.getHighestDeliveredSeqno();
        long low_seqno=val.getLow();

        NakReceiverWindow win=xmit_table.get(sender);
        if(win != null) {
            // When merging, we only reset the window if its seqno is lower than the seqno shipped
            // with the digest; when setting (merge=false) we always reset it
            if(!merge
                    || (local_addr != null && local_addr.equals(sender)) // never overwrite our own entry
                    || win.getHighestDelivered() >= highest_delivered_seqno) // my seqno is >= digest's seqno for sender
                continue;

            xmit_table.remove(sender);
            win.destroy(); // stops retransmission
            // if we reset our own window we also adopt the digest's seqno as our send seqno
            if(sender.equals(local_addr)) {
                seqno_lock.lock();
                try {
                    seqno=highest_delivered_seqno;
                    set_own_seqno=true;
                }
                finally {
                    seqno_lock.unlock();
                }
            }
        }
        win=createNakReceiverWindow(sender, highest_delivered_seqno, low_seqno);
        xmit_table.put(sender, win);
    }
    sb.append("\n").append("resulting digest: " + getDigest());
    if(set_own_seqno)
        sb.append("\nnew seqno for " + local_addr + ": " + seqno);
    digest_history.add(sb.toString());
    if(log.isDebugEnabled())
        log.debug(sb.toString());
}
/**
 * Creates and configures a new NakReceiverWindow for the given sender, seeded with the given
 * initial and lowest seqnos. The retransmission-interval policy is chosen in priority order:
 * measured stats (use_stats_for_retransmission), exponential backoff, then static timeouts.
 */
private NakReceiverWindow createNakReceiverWindow(Address sender, long initial_seqno, long lowest_seqno) {
    NakReceiverWindow win=new NakReceiverWindow(sender, this, initial_seqno, lowest_seqno, timer, true);

    Interval interval;
    if(use_stats_for_retransmission)
        interval=new ActualInterval(sender);
    else if(exponential_backoff > 0)
        interval=new ExponentialInterval(exponential_backoff);
    else
        interval=new StaticInterval(retransmit_timeouts);
    win.setRetransmitTimeouts(interval);

    win.setDiscardDeliveredMessages(discard_delivered_msgs);
    if(stats)
        win.setListener(this);
    return win;
}
/**
 * Dumps the accumulated per-second retransmission statistics to the given file, one row per
 * second (sorted by time), preceded by a header line.
 * @param filename the file to (over)write
 * @throws IOException if the file cannot be written
 */
private void dumpXmitStats(String filename) throws IOException {
    Writer out=new FileWriter(filename);
    try {
        // copy into a TreeMap so rows come out sorted by time bucket
        TreeMap<Long,XmitTimeStat> sorted=new TreeMap<Long,XmitTimeStat>(xmit_time_stats);
        out.write("time (secs) gaps-detected xmit-reqs-sent xmit-reqs-received xmit-rsps-sent xmit-rsps-received missing-msgs-received\n\n");
        for(Map.Entry<Long,XmitTimeStat> entry: sorted.entrySet()) {
            XmitTimeStat stat=entry.getValue();
            StringBuilder row=new StringBuilder();
            row.append(entry.getKey()).append(" ");
            row.append(stat.gaps_detected).append(" ");
            row.append(stat.xmit_reqs_sent).append(" ");
            row.append(stat.xmit_reqs_received).append(" ");
            row.append(stat.xmit_rsps_sent).append(" ");
            row.append(stat.xmit_rsps_received).append(" ");
            row.append(stat.missing_msgs_received).append("\n");
            out.write(row.toString());
        }
    }
    finally {
        out.close();
    }
}
/**
 * Garbage collect messages that have been seen by all members. Update sent_msgs: for the sender P in the digest
 * which is equal to the local address, garbage collect all messages <= seqno at digest[P]. Update xmit_table:
 * for each sender P in the digest and its highest seqno seen SEQ, garbage collect all delivered_msgs in the
 * NakReceiverWindow corresponding to P which are <= seqno at digest[P].
 * @param digest the stability digest; the method is a no-op if it (or members/local_addr) is null
 */
private void stable(Digest digest) {
    NakReceiverWindow recv_win;
    long my_highest_rcvd;        // highest seqno received in my digest for a sender P
    long stability_highest_rcvd; // highest seqno received in the stability vector for a sender P

    if(members == null || local_addr == null || digest == null) {
        if(log.isWarnEnabled())
            log.warn("members, local_addr or digest are null !");
        return;
    }

    if(log.isTraceEnabled()) {
        log.trace("received stable digest " + digest);
    }
    stability_msgs.add(digest);

    Address sender;
    Digest.Entry val;
    long high_seqno_delivered, high_seqno_received;

    for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) {
        sender=entry.getKey();
        if(sender == null)
            continue;
        val=entry.getValue();
        high_seqno_delivered=val.getHighestDeliveredSeqno();
        high_seqno_received=val.getHighestReceivedSeqno();

        // check whether the last seqno received for a sender P in the stability vector is > last seqno
        // received for P in my digest. if yes, request retransmission (see "Last Message Dropped" topic
        // in DESIGN)
        recv_win=xmit_table.get(sender);
        if(recv_win != null) {
            my_highest_rcvd=recv_win.getHighestReceived();
            stability_highest_rcvd=high_seqno_received;

            if(stability_highest_rcvd >= 0 && stability_highest_rcvd > my_highest_rcvd) {
                if(log.isTraceEnabled()) {
                    log.trace("my_highest_rcvd (" + my_highest_rcvd + ") < stability_highest_rcvd (" +
                            stability_highest_rcvd + "): requesting retransmission of " +
                            sender + '#' + stability_highest_rcvd);
                }
                retransmit(stability_highest_rcvd, stability_highest_rcvd, sender);
            }
        }

        // garbage collection lags gc_lag messages behind the stable seqno
        high_seqno_delivered-=gc_lag;
        if(high_seqno_delivered < 0) {
            continue;
        }

        if(log.isTraceEnabled())
            log.trace("deleting msgs <= " + high_seqno_delivered + " from " + sender);

        // delete *delivered* msgs that are stable
        if(recv_win != null) {
            recv_win.stable(high_seqno_delivered); // delete all messages with seqnos <= seqno
        }
    }
}
/**
 * Implementation of Retransmitter.RetransmitCommand. Called by retransmission thread when gap is detected.
 * Requests retransmission of the inclusive range [first_seqno .. last_seqno] from sender
 * (no forced multicast of the request).
 */
public void retransmit(long first_seqno, long last_seqno, Address sender) {
    retransmit(first_seqno, last_seqno, sender, false);
}
/**
 * Sends an XMIT_REQ for the inclusive range [first_seqno .. last_seqno] of sender's messages.
 * The request is sent to the sender itself, to a random member (xmit_from_random_member), or to
 * the whole group (multicast_xmit_request or use_mcast_xmit_req). Also records per-seqno request
 * timestamps (used later by missingMessageReceived() to compute retransmission times) and stats.
 * @param first_seqno lowest requested seqno (inclusive)
 * @param last_seqno  highest requested seqno (inclusive)
 * @param sender      original sender of the missing messages
 * @param multicast_xmit_request if true, multicast the request regardless of use_mcast_xmit_req
 */
protected void retransmit(long first_seqno, long last_seqno, final Address sender, boolean multicast_xmit_request) {
    NakAckHeader hdr;
    Message retransmit_msg;
    Address dest=sender; // to whom do we send the XMIT request ?

    if(multicast_xmit_request || this.use_mcast_xmit_req) {
        dest=null;
    }
    else {
        if(xmit_from_random_member && !local_addr.equals(sender)) {
            Address random_member=(Address)Util.pickRandomElement(members);
            if(random_member != null && !local_addr.equals(random_member)) {
                dest=random_member;
                if(log.isTraceEnabled())
                    log.trace("picked random member " + dest + " to send XMIT request to");
            }
        }
    }

    hdr=NakAckHeader.createXmitRequestHeader(first_seqno, last_seqno, sender);
    retransmit_msg=new Message(dest, null, null);
    retransmit_msg.setFlag(Message.OOB);
    if(log.isTraceEnabled())
        log.trace(local_addr + ": sending XMIT_REQ ([" + first_seqno + ", " + last_seqno + "]) to " + dest);
    retransmit_msg.putHeader(this.id, hdr);

    ConcurrentMap<Long,Long> tmp=xmit_stats.get(sender);
    if(tmp == null) {
        tmp=new ConcurrentHashMap<Long,Long>();
        ConcurrentMap<Long,Long> tmp2=xmit_stats.putIfAbsent(sender, tmp);
        if(tmp2 != null)
            tmp=tmp2;
    }
    // Fix: the requested range includes last_seqno (the stats below count last-first+1 requests),
    // so record a timestamp for last_seqno as well. The previous loop condition 'seq < last_seqno'
    // meant the retransmission time of the last seqno of every request was never tracked.
    for(long seq=first_seqno; seq <= last_seqno; seq++) {
        tmp.putIfAbsent(seq, System.currentTimeMillis());
    }

    if(xmit_time_stats != null) {
        long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
        XmitTimeStat stat=xmit_time_stats.get(key);
        if(stat == null) {
            stat=new XmitTimeStat();
            XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
            if(stat2 != null)
                stat=stat2;
        }
        stat.xmit_reqs_sent.addAndGet((int)(last_seqno - first_seqno +1));
    }

    down_prot.down(new Event(Event.MSG, retransmit_msg));
    if(stats) {
        xmit_reqs_sent+=last_seqno - first_seqno +1;
        updateStats(sent, sender, 1, 0, 0);
    }
    xmit_history.add(sender + ": " + first_seqno + "-" + last_seqno);
}
/**
 * Implementation of NakReceiverWindow.Listener: called when a previously missing message has
 * been received. Records how long the retransmission took (based on the timestamp stored when
 * the XMIT_REQ was sent), updates the smoothed per-sender average retransmission time, and
 * updates stats.
 * @param seqno the seqno of the message that had been missing
 * @param original_sender the original sender of the message
 */
public void missingMessageReceived(long seqno, final Address original_sender) {
    ConcurrentMap<Long,Long> tmp=xmit_stats.get(original_sender);
    if(tmp != null) {
        // timestamp was recorded in retransmit() when the XMIT_REQ went out
        Long timestamp=tmp.remove(seqno);
        if(timestamp != null) {
            long diff=System.currentTimeMillis() - timestamp;
            BoundedList<Long> list=xmit_times_history.get(original_sender);
            if(list == null) {
                list=new BoundedList<Long>(xmit_history_max_size);
                BoundedList<Long> list2=xmit_times_history.putIfAbsent(original_sender, list);
                if(list2 != null)
                    list=list2;
            }
            list.add(diff);

            // compute the smoothed average for retransmission times for original_sender
            // needs to be synchronized because we rely on the previous value for computation of the next value
            synchronized(smoothed_avg_xmit_times) {
                Double smoothed_avg=smoothed_avg_xmit_times.get(original_sender);
                if(smoothed_avg == null)
                    smoothed_avg=INITIAL_SMOOTHED_AVG;

                // the smoothed avg takes 90% of the previous value, 100% of the new value and averages them
                // then, we add 10% to be on the safe side (an xmit value should rather err on the higher than lower side)
                smoothed_avg=((smoothed_avg * WEIGHT) + diff) / 2;
                smoothed_avg=smoothed_avg * (2 - WEIGHT);
                smoothed_avg_xmit_times.put(original_sender, smoothed_avg);
            }
        }
    }

    if(xmit_time_stats != null) {
        // stats are bucketed per second since collection started
        long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
        XmitTimeStat stat=xmit_time_stats.get(key);
        if(stat == null) {
            stat=new XmitTimeStat();
            XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
            if(stat2 != null)
                stat=stat2;
        }
        stat.missing_msgs_received.incrementAndGet();
    }

    if(stats) {
        missing_msgs_received++;
        updateStats(received, original_sender, 0, 0, 1);
    }
}
/**
 * Called when a message gap is detected. Records the gap size in the per-second retransmission
 * time stats (only when enable_xmit_time_stats is set, i.e. xmit_time_stats is non-null).
 * @param from lowest missing seqno (inclusive)
 * @param to   highest missing seqno (inclusive)
 * @param src  the sender whose messages are missing
 */
public void messageGapDetected(long from, long to, Address src) {
    if(xmit_time_stats != null) {
        // stats are bucketed per second since collection started
        long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
        XmitTimeStat stat=xmit_time_stats.get(key);
        if(stat == null) {
            stat=new XmitTimeStat();
            XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
            if(stat2 != null)
                stat=stat2;
        }
        stat.gaps_detected.addAndGet((int)(to - from +1));
    }
}
/** Resets the send seqno to 0 and destroys and removes all receive windows. */
private void reset() {
    seqno_lock.lock();
    try {
        seqno=0;
    }
    finally {
        seqno_lock.unlock();
    }

    // tear down every receive window before dropping the table entries
    for(NakReceiverWindow window: xmit_table.values())
        window.destroy();
    xmit_table.clear();
}
/** Prints, per sender, the contents of that sender's receive window, headed by the local address. */
@ManagedOperation(description="TODO")
public String printMessages() {
    StringBuilder sb=new StringBuilder(local_addr + ":\n");
    for(Map.Entry<Address,NakReceiverWindow> e: xmit_table.entrySet())
        sb.append(e.getKey()).append(": ").append(e.getValue().toString()).append('\n');
    return sb.toString();
}
/**
 * Prints, for each sender, the average retransmission time (ms) over the recorded
 * retransmission history, or -1 if nothing has been recorded for that sender.
 */
@ManagedOperation(description="TODO")
public String printRetransmissionAvgs() {
    StringBuilder sb=new StringBuilder();
    for(Map.Entry<Address,BoundedList<Long>> entry: xmit_times_history.entrySet()) {
        Address sender=entry.getKey();
        BoundedList<Long> list=entry.getValue();
        long tmp=0;
        int i=0;
        for(Long val: list) {
            tmp+=val;
            i++;
        }
        // Fix: cast before dividing; 'tmp / i' was an integer division that silently truncated
        // the average (e.g. an average of 5.9 ms printed as 5.0)
        double avg=i > 0? (double)tmp / i : -1;
        sb.append(sender).append(": ").append(avg).append("\n");
    }
    return sb.toString();
}
/** Prints the smoothed average retransmission time per sender, one line each. */
@ManagedOperation(description="TODO")
public String printSmoothedRetransmissionAvgs() {
    StringBuilder sb=new StringBuilder();
    for(Map.Entry<Address,Double> e: smoothed_avg_xmit_times.entrySet())
        sb.append(e.getKey()).append(": ").append(e.getValue()).append("\n");
    return sb.toString();
}
/** Prints, per sender, the raw list of recorded retransmission times. */
@ManagedOperation(description="TODO")
public String printRetransmissionTimes() {
    StringBuilder sb=new StringBuilder();
    for(Map.Entry<Address,BoundedList<Long>> e: xmit_times_history.entrySet())
        sb.append(e.getKey()).append(": ").append(e.getValue()).append("\n");
    return sb.toString();
}
/** Prints the last N retransmission requests, one per line. */
@ManagedOperation(description="Prints the last N retransmission requests")
public String printXmitHistory() {
    StringBuilder sb=new StringBuilder();
    for(String request: xmit_history) {
        sb.append(request).append("\n");
    }
    return sb.toString();
}
/**
 * Returns the average retransmission time (ms) over all recorded retransmissions of all
 * senders, or -1 if nothing has been recorded yet.
 */
@ManagedAttribute
public double getTotalAverageRetransmissionTime() {
    long total=0;
    int i=0;

    for(BoundedList<Long> list: xmit_times_history.values()) {
        for(Long val: list) {
            total+=val;
            i++;
        }
    }
    // Fix: cast before dividing; 'total / i' was an integer division that silently truncated
    // the fractional part of the returned average
    return i > 0? (double)total / i : -1;
}
/** Returns the average of the smoothed per-sender retransmission times, or -1 if none exist. */
@ManagedAttribute
public double getTotalAverageSmoothedRetransmissionTime() {
    double sum=0.0;
    int count=0;

    synchronized(smoothed_avg_xmit_times) {
        for(Double avg: smoothed_avg_xmit_times.values()) {
            if(avg == null)
                continue;
            sum+=avg;
            count++;
        }
    }
    return count > 0? sum / count : -1;
}
/**
 * Returns the smoothed average retransmission time for a given sender, seeding the map with
 * INITIAL_SMOOTHED_AVG on first access for that sender.
 */
public double getSmoothedAverageRetransmissionTime(Address sender) {
    synchronized(smoothed_avg_xmit_times) {
        Double avg=smoothed_avg_xmit_times.get(sender);
        if(avg != null)
            return avg;
        smoothed_avg_xmit_times.put(sender, INITIAL_SMOOTHED_AVG);
        return INITIAL_SMOOTHED_AVG;
    }
}
/** ProbeHandler interface: answers the "digest-history" and "dump-digest" diagnostic probes. */
public Map<String, String> handleProbe(String... keys) {
    Map<String,String> responses=new HashMap<String,String>();
    for(String key: keys) {
        if(key.equals("digest-history"))
            responses.put(key, printDigestHistory());
        else if(key.equals("dump-digest"))
            responses.put(key, "\n" + printMessages());
    }
    return responses;
}
// ProbeHandler interface
/** Returns the probe keys this protocol responds to (see handleProbe()). */
public String[] supportedKeys() {
    return new String[]{"digest-history", "dump-digest"};
}
// public static final class LossRate {
// private final Set<Long> received=new HashSet<Long>();
// private final Set<Long> missing=new HashSet<Long>();
// private double smoothed_loss_rate=0.0;
// public synchronized void addReceived(long seqno) {
// received.add(seqno);
// missing.remove(seqno);
// setSmoothedLossRate();
// public synchronized void addReceived(Long ... seqnos) {
// for(int i=0; i < seqnos.length; i++) {
// Long seqno=seqnos[i];
// received.add(seqno);
// missing.remove(seqno);
// setSmoothedLossRate();
// public synchronized void addMissing(long from, long to) {
// for(long i=from; i <= to; i++) {
// if(!received.contains(i))
// missing.add(i);
// setSmoothedLossRate();
// public synchronized double computeLossRate() {
// int num_missing=missing.size();
// if(num_missing == 0)
// return 0.0;
// int num_received=received.size();
// int total=num_missing + num_received;
// return num_missing / (double)total;
// public synchronized double getSmoothedLossRate() {
// return smoothed_loss_rate;
// public synchronized String toString() {
// StringBuilder sb=new StringBuilder();
// int num_missing=missing.size();
// int num_received=received.size();
// int total=num_missing + num_received;
// sb.append("total=").append(total).append(" (received=").append(received.size()).append(", missing=")
// .append(missing.size()).append(", loss rate=").append(computeLossRate())
// .append(", smoothed loss rate=").append(smoothed_loss_rate).append(")");
// return sb.toString();
// /** Set the new smoothed_loss_rate value to 70% of the new value and 30% of the old value */
// private void setSmoothedLossRate() {
// double new_loss_rate=computeLossRate();
// if(smoothed_loss_rate == 0) {
// smoothed_loss_rate=new_loss_rate;
// else {
// smoothed_loss_rate=smoothed_loss_rate * .3 + new_loss_rate * .7;
/**
 * Per-second counters of retransmission activity: gaps detected, XMIT requests/responses sent
 * and received, and missing messages received. Only populated when enable_xmit_time_stats is
 * set (xmit_time_stats non-null); dumped by dumpXmitStats().
 */
private static class XmitTimeStat {
    final AtomicInteger gaps_detected=new AtomicInteger(0);
    final AtomicInteger xmit_reqs_sent=new AtomicInteger(0);
    final AtomicInteger xmit_reqs_received=new AtomicInteger(0);
    final AtomicInteger xmit_rsps_sent=new AtomicInteger(0);
    final AtomicInteger xmit_rsps_received=new AtomicInteger(0);
    final AtomicInteger missing_msgs_received=new AtomicInteger(0);
}
/**
 * Retransmission interval driven by measured statistics: next() returns the current smoothed
 * average retransmission time for the given sender (used when use_stats_for_retransmission
 * is enabled, see createNakReceiverWindow()).
 */
private class ActualInterval implements Interval {
    private final Address sender;

    public ActualInterval(Address sender) {
        this.sender=sender;
    }

    /** Returns the next retransmission delay: the smoothed average xmit time for sender. */
    public long next() {
        return (long)getSmoothedAverageRetransmissionTime(sender);
    }

    /** Holds no mutable state, so the same instance can be shared. */
    public Interval copy() {
        return this;
    }
}
/** Per-peer counters: retransmit requests, retransmit responses and missing messages received. */
static class StatsEntry {
    long xmit_reqs, xmit_rsps, missing_msgs_rcvd;

    public String toString() {
        return new StringBuilder()
                .append(xmit_reqs).append(" xmit_reqs").append(", ").append(xmit_rsps).append(" xmit_rsps")
                .append(", ").append(missing_msgs_rcvd).append(" missing msgs")
                .toString();
    }
}
}
|
package org.jgroups.protocols.pbcast;
import org.jgroups.*;
import org.jgroups.protocols.TP;
import org.jgroups.annotations.*;
import org.jgroups.conf.PropertyConverters;
import org.jgroups.stack.*;
import org.jgroups.util.*;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
/**
* Negative AcKnowledgement layer (NAKs). Messages are assigned a monotonically
* increasing sequence number (seqno). Receivers deliver messages ordered
* according to seqno and request retransmission of missing messages.<br/>
* Retransmit requests are usually sent to the original sender of a message, but
* this can be changed by xmit_from_random_member (send to random member) or
* use_mcast_xmit_req (send to everyone). Responses can also be sent to everyone
* instead of the requester by setting use_mcast_xmit to true.
*
* @author Bela Ban
* @version $Id: NAKACK.java,v 1.256 2010/08/27 09:26:08 belaban Exp $
*/
@MBean(description="Reliable transmission multipoint FIFO protocol")
@DeprecatedProperty(names={"max_xmit_size", "eager_lock_release", "stats_list_size"})
public class NAKACK extends Protocol implements Retransmitter.RetransmitCommand, NakReceiverWindow.Listener, TP.ProbeHandler {
/** the weight with which we take the previous smoothed average into account, WEIGHT should be >0 and <= 1 */
private static final double WEIGHT=0.9;
private static final double INITIAL_SMOOTHED_AVG=30.0;
private static final int NUM_REBROADCAST_MSGS=3;
@Property(name="retransmit_timeout", converter=PropertyConverters.LongArray.class, description="Timeout before requesting retransmissions. Default is 600, 1200, 2400, 4800")
private long[] retransmit_timeouts= { 600, 1200, 2400, 4800 }; // time(s) to wait before requesting retransmission
@Property(description="If true, retransmissions stats will be captured. Default is false")
boolean enable_xmit_time_stats=false;
@Property(description="Garbage collection lag")
private int gc_lag=20; // number of msgs garbage collection lags behind
@Property(description="Max number of messages to be removed from a NakReceiverWindow. This property might " +
"get removed anytime, so don't use it !")
private int max_msg_batch_size=20000;
/**
* Retransmit messages using multicast rather than unicast. This has the advantage that, if many receivers
* lost a message, the sender only retransmits once
*/
@Property(description="Retransmit messages using multicast rather than unicast")
private boolean use_mcast_xmit=true;
/**
* Use a multicast to request retransmission of missing messages. This may
* be costly as every member in the cluster will send a response
*/
@Property(description="Use a multicast to request retransmission of missing messages. Default is false")
private boolean use_mcast_xmit_req=false;
/**
* Ask a random member for retransmission of a missing message. If set to
* true, discard_delivered_msgs will be set to false
*/
@Property(description="Ask a random member for retransmission of a missing message. Default is false")
private boolean xmit_from_random_member=false;
/**
* The first value (in milliseconds) to use in the exponential backoff
* retransmission mechanism. Only enabled if the value is > 0
*/
@Property(description="The first value (in milliseconds) to use in the exponential backoff. Enabled if greater than 0. Default is 0")
private long exponential_backoff=0;
/**
* If enabled, we use statistics gathered from actual retransmission times
* to compute the new retransmission times
*/
@Property(description="Use statistics gathered from actual retransmission times to compute new retransmission times. Default is false")
private boolean use_stats_for_retransmission=false;
@Property(description="Whether to use the old retransmitter which retransmits individual messages or the new one " +
"which uses ranges of retransmitted messages. Default is true. Note that this property will be removed in 3.0; " +
"it is only used to switch back to the old (and proven) retransmitter mechanism if issues occur")
@Deprecated
private boolean use_range_based_retransmitter=true;
/**
* Messages that have been received in order are sent up the stack (=
* delivered to the application). Delivered messages are removed from
* NakReceiverWindow.xmit_table and moved to
* NakReceiverWindow.delivered_msgs, where they are later garbage collected
* (by STABLE). Since we do retransmits only from sent messages, never
* received or delivered messages, we can turn the moving to delivered_msgs
* off, so we don't keep the message around, and don't need to wait for
* garbage collection to remove them.
*/
@Property(description="Should messages delivered to application be discarded")
private boolean discard_delivered_msgs=false;
/**
* If value is > 0, the retransmit buffer is bounded: only the
* max_xmit_buf_size latest messages are kept, older ones are discarded when
* the buffer size is exceeded. A value <= 0 means unbounded buffers
*/
@Property(description="If value is > 0, the retransmit buffer is bounded. If value <= 0 unbounded buffers are used. Default is 0")
private int max_xmit_buf_size=0;
@Property(description="Size of retransmission history. Default is 50 entries")
private int xmit_history_max_size=50;
@Property(description="Timeout to rebroadcast messages. Default is 2000 msec")
private long max_rebroadcast_timeout=2000;
/**
* When not finding a message on an XMIT request, include the last N
* stability messages in the error message
*/
@Property(description="Should stability history be printed if we fail in retransmission. Default is false")
protected boolean print_stability_history_on_failed_xmit=false;
/** If true, logs messages discarded because received from other members */
@Property(description="discards warnings about promiscuous traffic")
private boolean log_discard_msgs=true;
@Property(description="If true, trashes warnings about retransmission messages not found in the xmit_table (used for testing)")
private boolean log_not_found_msgs=true;
@ManagedAttribute(description="Number of retransmit requests received")
private long xmit_reqs_received;
@ManagedAttribute(description="Number of retransmit requests sent")
private long xmit_reqs_sent;
@ManagedAttribute(description="Number of retransmit responses received")
private long xmit_rsps_received;
@ManagedAttribute(description="Number of retransmit responses sent")
private long xmit_rsps_sent;
@ManagedAttribute(description="Number of missing messages received")
private long missing_msgs_received;
/**
* Maintains retransmission related data across a time. Only used if enable_xmit_time_stats is set to true.
* At program termination, accumulated data is dumped to a file named by the address of the member.
* Careful, don't enable this in production as the data in this hashmap are
* never reaped ! Really only meant for diagnostics !
*/
private ConcurrentMap<Long,XmitTimeStat> xmit_time_stats=null;
private long xmit_time_stats_start;
/** Captures stats on XMIT_REQS, XMIT_RSPS per sender */
/** Per-destination stats on retransmission responses we sent (updated in sendXmitRsp()) */
private ConcurrentMap<Address,StatsEntry> sent=new ConcurrentHashMap<Address,StatsEntry>();
/** Captures stats on XMIT_REQS, XMIT_RSPS per receiver */
private ConcurrentMap<Address,StatsEntry> received=new ConcurrentHashMap<Address,StatsEntry>();
/**
 * Per-sender map of seqnos and timestamps, to keep track of avg times for retransmission of messages
 */
private final ConcurrentMap<Address,ConcurrentMap<Long,Long>> xmit_stats=new ConcurrentHashMap<Address,ConcurrentMap<Long,Long>>();
/**
 * Maintains a list of the last N retransmission times (duration it took to retransmit a message) for all members
 */
private final ConcurrentMap<Address,BoundedList<Long>> xmit_times_history=new ConcurrentHashMap<Address,BoundedList<Long>>();
/**
 * Maintains a smoothed average of the retransmission times per sender,
 * these are the actual values that are used for new retransmission requests
 */
private final Map<Address,Double> smoothed_avg_xmit_times=new HashMap<Address,Double>();
/** Keeps the last 50 retransmit requests */
private final BoundedList<String> xmit_history=new BoundedList<String>(50);
// Set on VIEW_CHANGE/BECOME_SERVER; until then, up() discards incoming multicasts
private boolean is_server=false;
// Our own address, set via Event.SET_LOCAL_ADDRESS
private Address local_addr=null;
// Current membership; replaced wholesale on VIEW_CHANGE / TMP_VIEW
private final List<Address> members=new CopyOnWriteArrayList<Address>();
// Last installed view (set on VIEW_CHANGE); used in discard warnings
private View view;
@GuardedBy("seqno_lock")
private long seqno=0; // current message sequence number (starts with 1)
// Makes "increment seqno + add msg to our own window" atomic in send()
private final Lock seqno_lock=new ReentrantLock();
/** Map to store sent and received messages (keyed by sender) */
private final ConcurrentMap<Address,NakReceiverWindow> xmit_table=new ConcurrentHashMap<Address,NakReceiverWindow>(11);
// Set on DISCONNECT; suppresses "no window for sender" warnings while leaving
private volatile boolean leaving=false;
// Set in start()/stop(); send() drops messages while false
private volatile boolean running=false;
// Transport-level timer, obtained in start(); also executes async OOB delivery in handleMessage()
private TimeScheduler timer=null;
// Lock + condition implementing the wait/wakeup handshake between rebroadcastMessages() and cancelRebroadcasting()
private final Lock rebroadcast_lock=new ReentrantLock();
private final Condition rebroadcast_done=rebroadcast_lock.newCondition();
// set during processing of a rebroadcast event
private volatile boolean rebroadcasting=false;
private final Lock rebroadcast_digest_lock=new ReentrantLock();
@GuardedBy("rebroadcast_digest_lock")
private Digest rebroadcast_digest=null;
/** BoundedList<Digest>, keeps the last 10 stability messages */
protected final BoundedList<Digest> stability_msgs=new BoundedList<Digest>(10);
/** Keeps a bounded list of the last N digest sets */
protected final BoundedList<String> digest_history=new BoundedList<String>(10);
/** <em>Regular</em> messages which have been added, but not removed */
private final AtomicInteger undelivered_msgs=new AtomicInteger(0);
public NAKACK() {
}
/** Number of regular (non-OOB) messages added to receiver windows but not yet delivered. */
@ManagedAttribute
public int getUndeliveredMessages() {
    return undelivered_msgs.get();
}
// Plain accessors for the scalar retransmission counters (reset via resetStats()).
public long getXmitRequestsReceived() {return xmit_reqs_received;}
public long getXmitRequestsSent() {return xmit_reqs_sent;}
public long getXmitResponsesReceived() {return xmit_rsps_received;}
public long getXmitResponsesSent() {return xmit_rsps_sent;}
public long getMissingMessagesReceived() {return missing_msgs_received;}
@ManagedAttribute(description="Total number of missing messages")
public int getPendingRetransmissionRequests() {
    // Sum the outstanding retransmit requests over all receiver windows.
    int total=0;
    for(NakReceiverWindow win: xmit_table.values())
        total+=win.getPendingXmits();
    return total;
}
@ManagedAttribute
public int getXmitTableSize() {
    // Total number of messages currently buffered across all receiver windows.
    int total=0;
    for(NakReceiverWindow win: xmit_table.values())
        total+=win.size();
    return total;
}
@ManagedOperation
public String printRetransmitStats() {
    // One line per sender: "<address>: <per-window retransmit stats>".
    StringBuilder out=new StringBuilder();
    Iterator<Map.Entry<Address,NakReceiverWindow>> it=xmit_table.entrySet().iterator();
    while(it.hasNext()) {
        Map.Entry<Address,NakReceiverWindow> entry=it.next();
        out.append(entry.getKey()).append(": ").append(entry.getValue().printRetransmitStats()).append("\n");
    }
    return out.toString();
}
/** Historical alias: delegates to {@link #getPendingRetransmissionRequests()}; it does NOT return a table size. */
public int getReceivedTableSize() {
    return getPendingRetransmissionRequests();
}
/**
 * Please don't use this method; it is only provided for unit testing !
 * @param mbr the member whose receiver window should be returned
 * @return the NakReceiverWindow for mbr, or null if none exists
 */
public NakReceiverWindow getWindow(Address mbr) {
    return xmit_table.get(mbr);
}
/** Clears all scalar counters, per-member stats maps and bounded histories. */
public void resetStats() {
    xmit_reqs_received=0;
    xmit_reqs_sent=0;
    xmit_rsps_received=0;
    xmit_rsps_sent=0;
    missing_msgs_received=0;
    sent.clear();
    received.clear();
    stability_msgs.clear();
    digest_history.clear();
    xmit_history.clear();
}
/**
 * Validates/reconciles configuration flags and registers with the transport.
 * Forces the mcast-retransmission flags off when the transport cannot multicast.
 */
public void init() throws Exception {
    if(enable_xmit_time_stats) {
        if(log.isWarnEnabled())
            log.warn("enable_xmit_time_stats is experimental, and may be removed in any release");
        xmit_time_stats=new ConcurrentHashMap<Long,XmitTimeStat>();
        xmit_time_stats_start=System.currentTimeMillis();
    }
    // Retransmitting from a random member requires keeping delivered messages around, so the two flags conflict.
    if(xmit_from_random_member && discard_delivered_msgs) {
        discard_delivered_msgs=false;
        log.warn("xmit_from_random_member set to true: changed discard_delivered_msgs to false");
    }
    TP transport=getTransport();
    if(transport == null)
        return;
    transport.registerProbeHandler(this);
    if(transport.supportsMulticasting())
        return;
    // Transport cannot multicast: both mcast-based retransmission options are unusable.
    if(use_mcast_xmit) {
        log.warn("use_mcast_xmit should not be used because the transport (" + transport.getName() +
                 ") does not support IP multicasting; setting use_mcast_xmit to false");
        use_mcast_xmit=false;
    }
    if(use_mcast_xmit_req) {
        log.warn("use_mcast_xmit_req should not be used because the transport (" + transport.getName() +
                 ") does not support IP multicasting; setting use_mcast_xmit_req to false");
        use_mcast_xmit_req=false;
    }
}
// Accessors for gc_lag: the number of seqnos below the stability point that are kept around (see stable()).
public int getGcLag() {
    return gc_lag;
}
public void setGcLag(int gc_lag) {
    this.gc_lag=gc_lag;
}
// True if retransmitted messages should be multicast to the group rather than unicast to the requester.
public boolean isUseMcastXmit() {
    return use_mcast_xmit;
}
public void setUseMcastXmit(boolean use_mcast_xmit) {
    this.use_mcast_xmit=use_mcast_xmit;
}
// True if retransmission requests are sent to a random member instead of the original sender (see retransmit()).
public boolean isXmitFromRandomMember() {
    return xmit_from_random_member;
}
public void setXmitFromRandomMember(boolean xmit_from_random_member) {
    this.xmit_from_random_member=xmit_from_random_member;
}
public boolean isDiscardDeliveredMsgs() {
    return discard_delivered_msgs;
}
/** Toggles discarding of delivered messages and propagates the change to all existing receiver windows. */
public void setDiscardDeliveredMsgs(boolean discard_delivered_msgs) {
    boolean old=this.discard_delivered_msgs;
    this.discard_delivered_msgs=discard_delivered_msgs;
    if(old != this.discard_delivered_msgs) {
        // windows created before the change must be updated individually
        for(NakReceiverWindow win: xmit_table.values()) {
            win.setDiscardDeliveredMessages(this.discard_delivered_msgs);
        }
    }
}
public int getMaxXmitBufSize() {
    return max_xmit_buf_size;
}
public void setMaxXmitBufSize(int max_xmit_buf_size) {
    this.max_xmit_buf_size=max_xmit_buf_size;
}
/**
 * Retained for compatibility only.
 * @return always -1
 * @deprecated removed in 2.6
 */
public long getMaxXmitSize() {
    return -1;
}
/**
 * No-op; retained for compatibility only.
 * @param max_xmit_size ignored
 * @deprecated removed in 2.6
 */
public void setMaxXmitSize(long max_xmit_size) {
}
// Controls whether dropped messages (sender without a receiver window) are logged as warnings.
public void setLogDiscardMessages(boolean flag) {
    log_discard_msgs=flag;
}
/** Alias for {@link #setLogDiscardMessages(boolean)}. */
public void setLogDiscardMsgs(boolean flag) {
    setLogDiscardMessages(flag);
}
public boolean getLogDiscardMessages() {
    return log_discard_msgs;
}
/** Extends the superclass stats map with a dump of all buffered messages under key "msgs". */
public Map<String,Object> dumpStats() {
    Map<String,Object> retval=super.dumpStats();
    retval.put("msgs", printMessages());
    return retval;
}
/**
 * Renders the send-side and receive-side retransmission stats plus the recently
 * received stability messages as a human-readable report.
 */
public String printStats() {
    StringBuilder sb=new StringBuilder();
    sb.append("sent:\n");
    appendStatsEntries(sb, sent);
    sb.append("\nreceived:\n");
    appendStatsEntries(sb, received);
    sb.append("\nStability messages received\n");
    sb.append(printStabilityMessages()).append("\n");
    return sb.toString();
}
/** Appends one "key: stats" line per entry; a null key (or Global.NULL) denotes the mcast destination. */
private static void appendStatsEntries(StringBuilder sb, ConcurrentMap<Address,StatsEntry> map) {
    for(Map.Entry<Address,StatsEntry> entry: map.entrySet()) {
        Object key=entry.getKey();
        if(key == null || key == Global.NULL) key="<mcast dest>";
        sb.append(key).append(": ").append(entry.getValue()).append("\n");
    }
}
@ManagedOperation(description="TODO")
public String printStabilityMessages() {
    // Renders the bounded list of recently received stability digests, one per line.
    // String.valueOf mirrors the old StringBuilder.append behavior if the helper ever returned null.
    return String.valueOf(Util.printListWithDelimiter(stability_msgs, "\n"));
}
/** Returns a 1-based numbered listing of the most recently received stability digests. */
public String printStabilityHistory() {
    StringBuilder out=new StringBuilder();
    int index=1;
    for(Digest digest: stability_msgs)
        out.append(index++).append(": ").append(digest).append("\n");
    return out.toString();
}
@ManagedOperation(description="Keeps information about the last N times a digest was set or merged")
public String printDigestHistory() {
    // Header line is our own address, followed by one line per recorded digest change.
    StringBuilder out=new StringBuilder();
    out.append(local_addr).append(":\n");
    for(String record: digest_history)
        out.append(record).append("\n");
    return out.toString();
}
@ManagedOperation(description="TODO")
public String printLossRates() {
    // One "<sender>: <loss rate>" line per receiver window.
    StringBuilder out=new StringBuilder();
    for(Map.Entry<Address,NakReceiverWindow> entry: xmit_table.entrySet())
        out.append(entry.getKey()).append(": ").append(entry.getValue().printLossRate()).append("\n");
    return out.toString();
}
@ManagedAttribute
public double getAverageLossRate() {
    // Arithmetic mean of the per-window loss rates; 0.0 when no windows exist.
    if(xmit_table.isEmpty())
        return 0.0;
    double sum=0.0;
    int windows=0;
    for(NakReceiverWindow win: xmit_table.values()) {
        sum+=win.getLossRate();
        windows++;
    }
    return sum / windows;
}
@ManagedAttribute
public double getAverageSmoothedLossRate() {
    // Arithmetic mean of the per-window smoothed loss rates; 0.0 when no windows exist.
    if(xmit_table.isEmpty())
        return 0.0;
    double sum=0.0;
    int windows=0;
    for(NakReceiverWindow win: xmit_table.values()) {
        sum+=win.getSmoothedLossRate();
        windows++;
    }
    return sum / windows;
}
/**
 * Events this protocol services for layers above: digest retrieval and installation.
 * Uses Integer.valueOf() instead of the deprecated Integer constructor (allows caching).
 */
public Vector<Integer> providedUpServices() {
    Vector<Integer> retval=new Vector<Integer>(4);
    retval.add(Integer.valueOf(Event.GET_DIGEST));
    retval.add(Integer.valueOf(Event.SET_DIGEST));
    retval.add(Integer.valueOf(Event.OVERWRITE_DIGEST));
    retval.add(Integer.valueOf(Event.MERGE_DIGEST));
    return retval;
}
/**
 * Starts the protocol: acquires the transport timer and, if xmit-time stats are enabled,
 * registers a shutdown hook that dumps the collected stats to a per-member log file.
 * @throws Exception if the transport provides no timer
 */
public void start() throws Exception {
    timer=getTransport().getTimer();
    if(timer == null)
        throw new Exception("timer is null");
    running=true;
    leaving=false;
    if(xmit_time_stats != null) {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                String filename="xmit-stats-" + local_addr + ".log";
                try {
                    dumpXmitStats(filename);
                }
                catch(IOException e) {
                    // route through the protocol logger instead of printStackTrace()
                    log.error("failed dumping xmit time stats to " + filename, e);
                }
            }
        });
    }
}
/** Stops the protocol: send() will discard new messages, and all receiver windows are torn down. */
public void stop() {
    running=false;
    reset(); // clears sent_msgs and destroys all NakReceiverWindows
}
/**
 * <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>down_prot.down()</code> in this
 * method as the event is passed down by default by the superclass after this method returns !</b>
 */
public Object down(Event evt) {
    switch(evt.getType()) {
        case Event.MSG:
            Message msg=(Message)evt.getArg();
            Address dest=msg.getDest();
            if(dest != null && !dest.isMulticastAddress()) {
                break; // unicast address: not null and not mcast, pass down unchanged
            }
            // multicast (or null-dest) message: stamp with a seqno, buffer for retransmission, pass down
            send(evt, msg);
            return null; // don't pass down the stack
        case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
            stable((Digest)evt.getArg());
            return null; // do not pass down further (Bela Aug 7 2001)
        case Event.GET_DIGEST:
            return getDigest();
        case Event.SET_DIGEST:
            setDigest((Digest)evt.getArg());
            return null;
        case Event.OVERWRITE_DIGEST:
            overwriteDigest((Digest)evt.getArg());
            return null;
        case Event.MERGE_DIGEST:
            mergeDigest((Digest)evt.getArg());
            return null;
        case Event.TMP_VIEW:
            // provisional view: only the membership list is updated, windows are left untouched
            View tmp_view=(View)evt.getArg();
            Vector<Address> mbrs=tmp_view.getMembers();
            members.clear();
            members.addAll(mbrs);
            // adjustReceivers(false);
            break;
        case Event.VIEW_CHANGE:
            tmp_view=(View)evt.getArg();
            mbrs=tmp_view.getMembers();
            members.clear();
            members.addAll(mbrs);
            view=tmp_view;
            adjustReceivers(members); // drop receiver windows of members no longer in the view
            is_server=true; // check vids from now on
            // retain stats bookkeeping only for current members, plus null (= mcast destination)
            Set<Address> tmp=new LinkedHashSet<Address>(members);
            tmp.add(null); // for null destination (= mcast)
            sent.keySet().retainAll(tmp);
            received.keySet().retainAll(tmp);
            xmit_stats.keySet().retainAll(tmp);
            // in_progress.keySet().retainAll(mbrs); // remove elements which are not in the membership
            break;
        case Event.BECOME_SERVER:
            is_server=true;
            break;
        case Event.SET_LOCAL_ADDRESS:
            local_addr=(Address)evt.getArg();
            break;
        case Event.DISCONNECT:
            leaving=true;
            reset();
            break;
        case Event.REBROADCAST:
            // blocks in rebroadcastMessages() until our digest covers the requested one,
            // the attempt is cancelled (SUSPECT), or the timeout expires
            rebroadcasting=true;
            rebroadcast_digest=(Digest)evt.getArg();
            try {
                rebroadcastMessages();
            }
            finally {
                rebroadcasting=false;
                rebroadcast_digest_lock.lock();
                try {
                    rebroadcast_digest=null;
                }
                finally {
                    rebroadcast_digest_lock.unlock();
                }
            }
            return null;
    }
    return down_prot.down(evt);
}
/**
 * <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>PassUp</code> in this
 * method as the event is passed up by default by the superclass after this method returns !</b>
 */
public Object up(Event evt) {
    switch(evt.getType()) {
        case Event.MSG:
            Message msg=(Message)evt.getArg();
            NakAckHeader hdr=(NakAckHeader)msg.getHeader(this.id);
            if(hdr == null)
                break; // pass up (e.g. unicast msg)
            // discard messages while not yet server (i.e., until JOIN has returned)
            if(!is_server) {
                if(log.isTraceEnabled())
                    log.trace("message was discarded (not yet server)");
                return null;
            }
            // Changed by bela Jan 29 2003: we must not remove the header, otherwise further xmit requests will fail !
            //hdr=(NakAckHeader)msg.removeHeader(getName());
            switch(hdr.type) {
                case NakAckHeader.MSG: // regular multicast: order and deliver via the sender's window
                    handleMessage(msg, hdr);
                    return null; // transmitter passes message up for us !
                case NakAckHeader.XMIT_REQ: // a member asks us to retransmit a range of messages
                    if(hdr.range == null) {
                        if(log.isErrorEnabled()) {
                            log.error("XMIT_REQ: range of xmit msg is null; discarding request from " + msg.getSrc());
                        }
                        return null;
                    }
                    handleXmitReq(msg.getSrc(), hdr.range.low, hdr.range.high, hdr.sender);
                    return null;
                case NakAckHeader.XMIT_RSP: // a retransmitted message arriving in response to an XMIT_REQ
                    handleXmitRsp(msg);
                    return null;
                default:
                    if(log.isErrorEnabled()) {
                        log.error("NakAck header type " + hdr.type + " not known !");
                    }
                    return null;
            }
        case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
            stable((Digest)evt.getArg());
            return null; // do not pass up further (Bela Aug 7 2001)
        case Event.SUSPECT:
            // release the promise if rebroadcasting is in progress... otherwise we wait forever. there will be a new
            // flush round anyway
            if(rebroadcasting) {
                cancelRebroadcasting();
            }
            break;
    }
    return up_prot.up(evt);
}
/**
 * Assigns the next seqno to a multicast message, stores the message in our own receiver
 * window (for later retransmission on request) and passes it down the stack.
 * Discards the message when the protocol is not running or no window exists for local_addr.
 * @param evt the original MSG event, passed down unchanged after stamping
 * @param msg the message wrapped by evt; must not be null
 */
private void send(Event evt, Message msg) {
    if(msg == null)
        throw new NullPointerException("msg is null; event is " + evt);
    if(!running) {
        if(log.isTraceEnabled())
            log.trace("[" + local_addr + "] discarded message as we're not in the 'running' state, message: " + msg);
        return;
    }
    long msg_id;
    NakReceiverWindow win=xmit_table.get(local_addr);
    if(win == null) { // discard message if there is no entry for local_addr
        if(log.isWarnEnabled() && log_discard_msgs)
            log.warn(local_addr + ": discarded message from " + local_addr + " with no window, my view is " + view);
        return;
    }
    msg.setSrc(local_addr); // this needs to be done so we can check whether the message sender is the local_addr
    seqno_lock.lock();
    try {
        try { // incrementing seqno and adding the msg to sent_msgs needs to be atomic
            msg_id=seqno +1;
            msg.putHeader(this.id, NakAckHeader.createMessageHeader(msg_id));
            if(win.add(msg_id, msg) && !msg.isFlagSet(Message.OOB))
                undelivered_msgs.incrementAndGet();
            seqno=msg_id; // advance only after the message is safely in the window
        }
        catch(Throwable t) {
            throw new RuntimeException("failure adding msg " + msg + " to the retransmit table for " + local_addr, t);
        }
    }
    finally {
        seqno_lock.unlock();
    }
    try {
        if(log.isTraceEnabled())
            log.trace("sending " + local_addr + "#" + msg_id);
        down_prot.down(evt); // if this fails, since msg is in sent_msgs, it can be retransmitted
    }
    catch(Throwable t) { // eat the exception, don't pass it up the stack
        if(log.isWarnEnabled()) {
            log.warn("failure passing message down", t);
        }
    }
}
/**
 * Finds the corresponding NakReceiverWindow and adds the message to it (according to seqno). Then removes as many
 * messages as possible from the NRW and passes them up the stack. Discards messages from non-members.
 */
private void handleMessage(Message msg, NakAckHeader hdr) {
    Address sender=msg.getSrc();
    if(sender == null) {
        if(log.isErrorEnabled())
            log.error("sender of message is null");
        return;
    }
    if(log.isTraceEnabled())
        log.trace(new StringBuilder().append(local_addr).append(": received ").append(sender).append('#').append(hdr.seqno));
    NakReceiverWindow win=xmit_table.get(sender);
    if(win == null) { // discard message if there is no entry for sender
        if(leaving)
            return; // silently drop while we are shutting down
        if(log.isWarnEnabled() && log_discard_msgs)
            log.warn(local_addr + ": dropped message from " + sender +
                     " (not in xmit_table), keys are " + xmit_table.keySet() +", view=" + view);
        return;
    }
    // our own multicast loops back: it was already added to our window in send(), so don't add it twice
    boolean loopback=local_addr.equals(sender);
    boolean added_to_window=false;
    boolean added=loopback || (added_to_window=win.add(hdr.seqno, msg));
    if(added_to_window && !msg.isFlagSet(Message.OOB))
        undelivered_msgs.incrementAndGet();
    // message is passed up if OOB. Later, when remove() is called, we discard it. This affects ordering !
    if(msg.isFlagSet(Message.OOB)) {
        if(added) {
            msg=win.get(hdr.seqno); // re-fetch: for loopbacks this yields the instance stored by send()
            if(msg != null && msg.isFlagSet(Message.OOB)) {
                if(msg.setTransientFlagIfAbsent(Message.OOB_DELIVERED))
                    up_prot.up(new Event(Event.MSG, msg));
            }
        }
        // drain any further OOB messages that have become removable in the meantime
        List<Message> msgs;
        while(!(msgs=win.removeOOBMessages()).isEmpty()) {
            for(Message tmp_msg: msgs) {
                if(tmp_msg.setTransientFlagIfAbsent(Message.OOB_DELIVERED)) {
                    up_prot.up(new Event(Event.MSG, tmp_msg));
                }
            }
        }
        // an OOB thread only continues into regular delivery below when regular messages are pending
        if(!(win.hasMessagesToRemove() && undelivered_msgs.get() > 0))
            return;
    }
    // Efficient way of checking whether another thread is already processing messages from 'sender'.
    // If that's the case, we return immediately and let the existing thread process our message
    // can be returned to the thread pool
    final AtomicBoolean processing=win.getProcessing();
    if(!processing.compareAndSet(false, true)) {
        return;
    }
    // where lots of threads can come up to this point concurrently, but only 1 is allowed to pass at a time
    // We *can* deliver messages from *different* senders concurrently, e.g. reception of P1, Q1, P2, Q2 can result in
    // delivery of P1, Q1, Q2, P2: FIFO (implemented by NAKACK) says messages need to be delivered in the
    // order in which they were sent by the sender
    int num_regular_msgs_removed=0;
    // 2nd line of defense: in case of an exception, remove() might not be called, therefore processing would never
    // be set back to false. If we get an exception and released_processing is not true, then we set
    // processing to false in the finally clause
    boolean released_processing=false;
    try {
        while(true) {
            // we're removing a msg and set processing to false (if null) *atomically* (wrt to add())
            List<Message> msgs=win.removeMany(processing, max_msg_batch_size);
            if(msgs == null || msgs.isEmpty()) {
                released_processing=true; // removeMany() already reset 'processing' for us
                return;
            }
            for(final Message msg_to_deliver: msgs) {
                if(msg_to_deliver.isFlagSet(Message.OOB)) {
                    // OOB messages not delivered above are handed off to the timer's pool, off this thread
                    if(msg_to_deliver.setTransientFlagIfAbsent(Message.OOB_DELIVERED)) {
                        timer.execute(new Runnable() {
                            public void run() {
                                up_prot.up(new Event(Event.MSG, msg_to_deliver));
                            }
                        });
                    }
                    continue;
                }
                num_regular_msgs_removed++;
                // Changed by bela Jan 29 2003: not needed (see above)
                //msg_to_deliver.removeHeader(getName());
                try {
                    up_prot.up(new Event(Event.MSG, msg_to_deliver));
                }
                catch(Throwable t) {
                    log.error("couldn't deliver message " + msg_to_deliver, t);
                }
            }
        }
    }
    finally {
        // We keep track of regular messages that we added, but couldn't remove (because of ordering).
        // When we have such messages pending, then even OOB threads will remove and process them
        undelivered_msgs.addAndGet(-num_regular_msgs_removed);
        // processing is always set in win.remove(processing) above and never here ! This code is just a
        // 2nd line of defense should there be an exception before win.remove(processing) sets processing
        if(!released_processing)
            processing.set(false);
    }
}
/**
 * Retransmits messsages first_seqno to last_seqno from original_sender from xmit_table to xmit_requester,
 * called when XMIT_REQ is received.
 * @param xmit_requester The sender of the XMIT_REQ, we have to send the requested copy of the message to this address
 * @param first_seqno The first sequence number to be retransmitted (<= last_seqno)
 * @param last_seqno The last sequence number to be retransmitted (>= first_seqno)
 * @param original_sender The member who originally sent the messsage. Guaranteed to be non-null
 */
private void handleXmitReq(Address xmit_requester, long first_seqno, long last_seqno, Address original_sender) {
    Message msg;
    if(log.isTraceEnabled()) {
        StringBuilder sb=new StringBuilder();
        sb.append(local_addr).append(": received xmit request from ").append(xmit_requester).append(" for ");
        sb.append(original_sender).append(" [").append(first_seqno).append(" - ").append(last_seqno).append("]");
        log.trace(sb.toString());
    }
    if(first_seqno > last_seqno)
        return; // empty range: nothing to retransmit
    if(stats) {
        xmit_reqs_received+=last_seqno - first_seqno +1;
        updateStats(received, xmit_requester, 1, 0, 0);
    }
    // optional per-second time-series bookkeeping (enable_xmit_time_stats)
    if(xmit_time_stats != null) {
        long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
        XmitTimeStat stat=xmit_time_stats.get(key);
        if(stat == null) {
            stat=new XmitTimeStat();
            XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
            if(stat2 != null)
                stat=stat2; // another thread created the slot first; use its instance
        }
        stat.xmit_reqs_received.addAndGet((int)(last_seqno - first_seqno +1));
        stat.xmit_rsps_sent.addAndGet((int)(last_seqno - first_seqno +1));
    }
    NakReceiverWindow win=xmit_table.get(original_sender);
    if(win == null) {
        if(log.isErrorEnabled()) {
            StringBuilder sb=new StringBuilder();
            sb.append("(requester=").append(xmit_requester).append(", local_addr=").append(this.local_addr);
            sb.append(") ").append(original_sender).append(" not found in retransmission table");
            // don't print the table unless we are in trace mode because it can be LARGE
            if (log.isTraceEnabled()) {
                sb.append(":\n").append(printMessages());
            }
            if(print_stability_history_on_failed_xmit) {
                sb.append(" (stability history:\n").append(printStabilityHistory());
            }
            log.error(sb.toString());
        }
        return;
    }
    // resend every requested seqno we still have; individual gaps are only warned about, not fatal
    for(long i=first_seqno; i <= last_seqno; i++) {
        msg=win.get(i);
        if(msg == null) {
            if(log.isWarnEnabled() && log_not_found_msgs && !local_addr.equals(xmit_requester)) {
                StringBuilder sb=new StringBuilder();
                sb.append("(requester=").append(xmit_requester).append(", local_addr=").append(this.local_addr);
                sb.append(") message ").append(original_sender).append("::").append(i);
                sb.append(" not found in retransmission table of ").append(original_sender).append(":\n").append(win);
                if(print_stability_history_on_failed_xmit) {
                    sb.append(" (stability history:\n").append(printStabilityHistory());
                }
                log.warn(sb.toString());
            }
            continue;
        }
        sendXmitRsp(xmit_requester, msg, i);
    }
}
/**
 * Aborts a pending rebroadcast: clears the flag and wakes up the waiter in rebroadcastMessages().
 * The flag must be cleared before signalling, while holding rebroadcast_lock, so the waiter
 * observes the change when it re-checks its condition.
 */
private void cancelRebroadcasting() {
    rebroadcast_lock.lock();
    try {
        rebroadcasting=false;
        rebroadcast_done.signalAll();
    }
    finally {
        rebroadcast_lock.unlock();
    }
}
/**
 * Adds the given request/response/missing counts to the per-member StatsEntry in map,
 * creating the entry on first use (putIfAbsent resolves creation races).
 * NOTE(review): the += updates below are not synchronized, so concurrent callers can lose
 * increments. Presumably acceptable for approximate stats -- confirm before relying on exact counts.
 * @param map the stats map to update (sent or received)
 * @param key the member the counts are attributed to
 */
private static void updateStats(ConcurrentMap<Address,StatsEntry> map, Address key, int req, int rsp, int missing) {
    StatsEntry entry=map.get(key);
    if(entry == null) {
        entry=new StatsEntry();
        StatsEntry tmp=map.putIfAbsent(key, entry);
        if(tmp != null)
            entry=tmp;
    }
    entry.xmit_reqs+=req;
    entry.xmit_rsps+=rsp;
    entry.missing_msgs_rcvd+=missing;
}
/**
 * Sends a message msg to the requester. We have to wrap the original message into a retransmit message, as we need
 * to preserve the original message's properties, such as src, headers etc.
 * @param dest the requester; replaced by null (= mcast) when use_mcast_xmit is set
 * @param msg the original message to retransmit; must not be null
 * @param seqno the seqno of msg (currently unused in this method)
 */
private void sendXmitRsp(Address dest, Message msg, long seqno) {
    Buffer buf;
    if(msg == null) {
        if(log.isErrorEnabled())
            log.error("message is null, cannot send retransmission");
        return;
    }
    if(stats) {
        xmit_rsps_sent++;
        updateStats(sent, dest, 0, 1, 0);
    }
    if(use_mcast_xmit)
        dest=null; // null destination = multicast the response to the whole group
    if(msg.getSrc() == null)
        msg.setSrc(local_addr);
    try {
        // serialize the original message and ship it as the payload of a new XMIT_RSP-tagged message
        buf=Util.messageToByteBuffer(msg);
        Message xmit_msg=new Message(dest, null, buf.getBuf(), buf.getOffset(), buf.getLength());
        // changed Bela Jan 4 2007: we should not use OOB for retransmitted messages, otherwise we tax the
        // OOB thread pool too much
        // xmit_msg.setFlag(Message.OOB);
        if(msg.isFlagSet(Message.OOB)) // set OOB for the wrapping message if the wrapped message is OOB, too
            xmit_msg.setFlag(Message.OOB);
        xmit_msg.putHeader(this.id, NakAckHeader.createXmitResponseHeader());
        down_prot.down(new Event(Event.MSG, xmit_msg));
    }
    catch(IOException ex) {
        log.error("failed marshalling xmit list", ex);
    }
}
/**
 * Handles an XMIT_RSP: unwraps the retransmitted message carried in the payload, re-injects it
 * through up() as a regular message, and -- if a rebroadcast is in progress -- checks whether our
 * digest now covers the rebroadcast target so the waiter can be released.
 */
private void handleXmitRsp(Message msg) {
    if(msg == null) {
        if(log.isWarnEnabled())
            log.warn("message is null");
        return;
    }
    try {
        // the original message travels serialized in the payload; restore it
        Message wrapped_msg=Util.byteBufferToMessage(msg.getRawBuffer(), msg.getOffset(), msg.getLength());
        if(xmit_time_stats != null) {
            long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
            XmitTimeStat stat=xmit_time_stats.get(key);
            if(stat == null) {
                stat=new XmitTimeStat();
                XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
                if(stat2 != null)
                    stat=stat2;
            }
            stat.xmit_rsps_received.incrementAndGet();
        }
        if(stats) {
            xmit_rsps_received++;
            updateStats(received, msg.getSrc(), 0, 1, 0);
        }
        // re-inject: the wrapped message still carries its original NakAckHeader and is
        // processed by handleMessage() like a first-time reception
        up(new Event(Event.MSG, wrapped_msg));
        if(rebroadcasting) {
            Digest tmp=getDigest();
            boolean cancel_rebroadcasting;
            rebroadcast_digest_lock.lock();
            try {
                cancel_rebroadcasting=tmp.isGreaterThanOrEqual(rebroadcast_digest);
            }
            finally {
                rebroadcast_digest_lock.unlock();
            }
            if(cancel_rebroadcasting) {
                cancelRebroadcasting(); // wake up the waiter in rebroadcastMessages()
            }
        }
    }
    catch(Exception ex) {
        if(log.isErrorEnabled()) {
            log.error("failed reading retransmitted message", ex);
        }
    }
}
/**
 * Takes the argument highest_seqnos and compares it to the current digest. If the current digest has fewer messages,
 * then send retransmit messages for the missing messages. Return when all missing messages have been received. If
 * we're waiting for a missing message from P, and P crashes while waiting, we need to exclude P from the wait set.
 */
private void rebroadcastMessages() {
    Digest my_digest;
    Map<Address,Digest.Entry> their_digest;
    long sleep=max_rebroadcast_timeout / NUM_REBROADCAST_MSGS;
    long start=System.currentTimeMillis();
    long wait_time=max_rebroadcast_timeout; // remaining time budget, recomputed from 'start' below
    while(wait_time > 0) {
        rebroadcast_digest_lock.lock();
        try {
            if(rebroadcast_digest == null)
                break; // rebroadcast was cancelled/finished by another thread
            their_digest=rebroadcast_digest.getSenders();
        }
        finally {
            rebroadcast_digest_lock.unlock();
        }
        my_digest=getDigest();
        boolean xmitted=false;
        for(Map.Entry<Address,Digest.Entry> entry: their_digest.entrySet()) {
            Address sender=entry.getKey();
            Digest.Entry their_entry=entry.getValue();
            Digest.Entry my_entry=my_digest.get(sender);
            if(my_entry == null)
                continue; // sender unknown to us (e.g. crashed): exclude it from the wait set
            long their_high=their_entry.getHighest();
            long my_high=my_entry.getHighest();
            if(their_high > my_high) {
                if(log.isTraceEnabled())
                    log.trace("sending XMIT request to " + sender + " for messages " + my_high + " - " + their_high);
                retransmit(my_high, their_high, sender, true); // use multicast to send retransmit request
                xmitted=true;
            }
        }
        if(!xmitted)
            return; // we're done; no retransmissions are needed anymore. our digest is >= rebroadcast_digest
        rebroadcast_lock.lock();
        try {
            try {
                my_digest=getDigest();
                rebroadcast_digest_lock.lock();
                try {
                    if(!rebroadcasting || my_digest.isGreaterThanOrEqual(rebroadcast_digest))
                        return;
                }
                finally {
                    rebroadcast_digest_lock.unlock();
                }
                rebroadcast_done.await(sleep, TimeUnit.MILLISECONDS);
                // BUGFIX: the previous code did wait_time -= (now - start), subtracting the *cumulative*
                // elapsed time on every iteration and thus expiring much too early. Recompute the
                // remaining budget from the fixed start timestamp instead.
                wait_time=max_rebroadcast_timeout - (System.currentTimeMillis() - start);
            }
            catch(InterruptedException e) {
                // deliberately swallowed: re-interrupting here would make the next await() return
                // immediately and turn this wait loop into a busy spin until the timeout expires
            }
        }
        finally {
            rebroadcast_lock.unlock();
        }
    }
}
/**
 * Remove old members from NakReceiverWindows. Essentially removes all entries from xmit_table that are not
 * in <code>members</code>. This method is not called concurrently multiple times
 * @param new_members the current membership; windows of any other member are destroyed and removed
 */
private void adjustReceivers(List<Address> new_members) {
    for(Address member: xmit_table.keySet()) { // CHM iteration tolerates concurrent remove()
        if(!new_members.contains(member)) {
            if(local_addr != null && local_addr.equals(member))
                continue; // never remove our own window
            NakReceiverWindow win=xmit_table.remove(member);
            win.destroy(); // stops this window's retransmission activity
            if(log.isDebugEnabled())
                log.debug("removed " + member + " from xmit_table (not member anymore)");
        }
    }
}
/**
 * Returns a message digest: for each member P the lowest, highest delivered and highest received seqno is added
 */
public Digest getDigest() {
    // Snapshot every receiver window into one Digest.Entry (low, highest delivered, highest received).
    Map<Address,Digest.Entry> entries=new HashMap<Address,Digest.Entry>();
    for(Map.Entry<Address,NakReceiverWindow> e: xmit_table.entrySet()) {
        long[] seqnos=e.getValue().getDigest(); // window guaranteed non-null (CCHM)
        entries.put(e.getKey(), new Digest.Entry(seqnos[0], seqnos[1], seqnos[2]));
    }
    return new Digest(entries);
}
/**
 * Creates a NakReceiverWindow for each sender in the digest according to the sender's seqno. If NRW already exists,
 * reset it.
 */
private void setDigest(Digest digest) {
    setDigest(digest, false);
}
/** Merges the digest with our own state: existing windows are kept unless the digest is ahead of them. */
private void mergeDigest(Digest digest) {
    setDigest(digest, true);
}
/**
 * Overwrites existing entries, but does NOT remove entries not found in the digest
 * @param digest the digest whose entries unconditionally replace our windows; null is a no-op
 */
private void overwriteDigest(Digest digest) {
    if(digest == null)
        return;
    StringBuilder history=new StringBuilder("\n[overwriteDigest()]\n");
    history.append("existing digest: ").append(getDigest()).append("\nnew digest: ").append(digest);
    for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) {
        Address sender=entry.getKey();
        Digest.Entry val=entry.getValue();
        if(sender == null || val == null)
            continue;
        // unconditionally tear down any existing window for this sender...
        NakReceiverWindow existing=xmit_table.get(sender);
        if(existing != null) {
            existing.destroy(); // stops retransmission
            xmit_table.remove(sender);
        }
        // ...and install a fresh one seeded from the digest entry
        xmit_table.put(sender, createNakReceiverWindow(sender, val.getHighestDeliveredSeqno(), val.getLow()));
    }
    history.append("\n").append("resulting digest: ").append(getDigest());
    digest_history.add(history.toString());
    if(log.isDebugEnabled())
        log.debug(history.toString());
}
/**
 * Sets or merges the digest. If there is no entry for a given member in xmit_table, create a new NakReceiverWindow.
 * Else skip the existing entry, unless it is a merge. In this case, skip the existing entry if its seqno is
 * greater than or equal to the one in the digest, or reset the window and create a new one if not.
 * @param digest The digest
 * @param merge Whether to merge the new digest with our own, or not
 */
private void setDigest(Digest digest, boolean merge) {
    if(digest == null)
        return;
    StringBuilder sb=new StringBuilder(merge? "\n[mergeDigest()]\n" : "\n[setDigest()]\n");
    sb.append("existing digest: " + getDigest()).append("\nnew digest: " + digest);
    for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) {
        Address sender=entry.getKey();
        Digest.Entry val=entry.getValue();
        if(sender == null || val == null)
            continue;
        long highest_delivered_seqno=val.getHighestDeliveredSeqno();
        long low_seqno=val.getLow();
        NakReceiverWindow win=xmit_table.get(sender);
        if(win != null) {
            // We only reset the window if its seqno is lower than the seqno shipped with the digest. Also, we
            if(!merge
               || (local_addr != null && local_addr.equals(sender)) // never overwrite our own entry
               || win.getHighestDelivered() >= highest_delivered_seqno) // my seqno is >= digest's seqno for sender
                continue;
            win.destroy(); // stops retransmission
            xmit_table.remove(sender);
        }
        // create a fresh window seeded from the digest entry
        win=createNakReceiverWindow(sender, highest_delivered_seqno, low_seqno);
        xmit_table.put(sender, win);
    }
    sb.append("\n").append("resulting digest: " + getDigest());
    digest_history.add(sb.toString()); // record for printDigestHistory()
    if(log.isDebugEnabled())
        log.debug(sb.toString());
}
/**
 * Builds and configures a receiver window for sender, wiring in the configured
 * retransmission-interval policy and the current discard/buffer-size settings.
 */
private NakReceiverWindow createNakReceiverWindow(Address sender, long initial_seqno, long lowest_seqno) {
    NakReceiverWindow win=new NakReceiverWindow(local_addr, sender, this, initial_seqno, lowest_seqno, timer,
                                                use_range_based_retransmitter);
    // Interval policy precedence: measured stats, then exponential backoff, then the static schedule.
    if(use_stats_for_retransmission)
        win.setRetransmitTimeouts(new ActualInterval(sender));
    else if(exponential_backoff > 0)
        win.setRetransmitTimeouts(new ExponentialInterval(exponential_backoff));
    else
        win.setRetransmitTimeouts(new StaticInterval(retransmit_timeouts));
    win.setDiscardDeliveredMessages(discard_delivered_msgs);
    win.setMaxXmitBufSize(this.max_xmit_buf_size);
    if(stats)
        win.setListener(this); // only track window events when stats collection is on
    return win;
}
/**
 * Writes the per-second xmit time-series stats to filename as whitespace-separated columns,
 * sorted by time offset. Called from the shutdown hook installed in start().
 */
private void dumpXmitStats(String filename) throws IOException {
    Writer out=new FileWriter(filename);
    try {
        // copy into a TreeMap so rows come out ordered by their second-offset key
        TreeMap<Long,XmitTimeStat> sorted=new TreeMap<Long,XmitTimeStat>(xmit_time_stats);
        out.write("time (secs) gaps-detected xmit-reqs-sent xmit-reqs-received xmit-rsps-sent xmit-rsps-received missing-msgs-received\n\n");
        for(Map.Entry<Long,XmitTimeStat> entry: sorted.entrySet()) {
            XmitTimeStat stat=entry.getValue();
            StringBuilder row=new StringBuilder();
            row.append(entry.getKey()).append(" ")
               .append(stat.gaps_detected).append(" ")
               .append(stat.xmit_reqs_sent).append(" ")
               .append(stat.xmit_reqs_received).append(" ")
               .append(stat.xmit_rsps_sent).append(" ")
               .append(stat.xmit_rsps_received).append(" ")
               .append(stat.missing_msgs_received).append("\n");
            out.write(row.toString());
        }
    }
    finally {
        out.close();
    }
}
/**
 * Garbage collect messages that have been seen by all members. Update sent_msgs: for the sender P in the digest
 * which is equal to the local address, garbage collect all messages <= seqno at digest[P]. Update xmit_table:
 * for each sender P in the digest and its highest seqno seen SEQ, garbage collect all delivered_msgs in the
 * NakReceiverWindow corresponding to P which are <= seqno at digest[P].
 */
private void stable(Digest digest) {
    NakReceiverWindow recv_win;
    long my_highest_rcvd; // highest seqno received in my digest for a sender P
    long stability_highest_rcvd; // highest seqno received in the stability vector for a sender P
    if(members == null || local_addr == null || digest == null) {
        if(log.isWarnEnabled())
            log.warn("members, local_addr or digest are null !");
        return;
    }
    if(log.isTraceEnabled()) {
        log.trace("received stable digest " + digest);
    }
    stability_msgs.add(digest); // record for printStabilityMessages()/printStabilityHistory()
    Address sender;
    Digest.Entry val;
    long high_seqno_delivered, high_seqno_received;
    for(Map.Entry<Address, Digest.Entry> entry: digest.getSenders().entrySet()) {
        sender=entry.getKey();
        if(sender == null)
            continue;
        val=entry.getValue();
        high_seqno_delivered=val.getHighestDeliveredSeqno();
        high_seqno_received=val.getHighestReceivedSeqno();
        // check whether the last seqno received for a sender P in the stability vector is > last seqno
        // received for P in my digest. if yes, request retransmission (see "Last Message Dropped" topic
        // in DESIGN)
        recv_win=xmit_table.get(sender);
        if(recv_win != null) {
            my_highest_rcvd=recv_win.getHighestReceived();
            stability_highest_rcvd=high_seqno_received;
            if(stability_highest_rcvd >= 0 && stability_highest_rcvd > my_highest_rcvd) {
                if(log.isTraceEnabled()) {
                    log.trace("my_highest_rcvd (" + my_highest_rcvd + ") < stability_highest_rcvd (" +
                              stability_highest_rcvd + "): requesting retransmission of " +
                              sender + '#' + stability_highest_rcvd);
                }
                retransmit(stability_highest_rcvd, stability_highest_rcvd, sender);
            }
        }
        // keep gc_lag extra seqnos below the stability point as a safety margin
        high_seqno_delivered-=gc_lag;
        if(high_seqno_delivered < 0) {
            continue;
        }
        if(log.isTraceEnabled())
            log.trace("deleting msgs <= " + high_seqno_delivered + " from " + sender);
        // delete *delivered* msgs that are stable
        if(recv_win != null) {
            recv_win.stable(high_seqno_delivered); // delete all messages with seqnos <= seqno
        }
    }
}
/**
* Implementation of Retransmitter.RetransmitCommand. Called by retransmission thread when gap is detected.
*/
public void retransmit(long first_seqno, long last_seqno, Address sender) {
    // delegates to the 4-arg variant; false = do not force a multicast XMIT request
    retransmit(first_seqno, last_seqno, sender, false);
}
/**
 * Sends an XMIT request for the (inclusive) range [first_seqno .. last_seqno] of messages
 * originally sent by {@code sender}. Depending on configuration, the request goes to the
 * original sender, to a randomly picked member, or to the whole group (dest == null).
 * Also records per-seqno request timestamps (used by missingMessageReceived() to compute
 * retransmission times) and per-second request statistics.
 */
protected void retransmit(long first_seqno, long last_seqno, final Address sender, boolean multicast_xmit_request) {
    NakAckHeader hdr;
    Message retransmit_msg;
    Address dest=sender; // to whom do we send the XMIT request ?
    if(multicast_xmit_request || this.use_mcast_xmit_req) {
        dest=null; // null destination == multicast to the entire group
    }
    else {
        if(xmit_from_random_member && !local_addr.equals(sender)) {
            Address random_member=(Address)Util.pickRandomElement(members);
            if(random_member != null && !local_addr.equals(random_member)) {
                dest=random_member;
                if(log.isTraceEnabled())
                    log.trace("picked random member " + dest + " to send XMIT request to");
            }
        }
    }
    hdr=NakAckHeader.createXmitRequestHeader(first_seqno, last_seqno, sender);
    retransmit_msg=new Message(dest, null, null);
    retransmit_msg.setFlag(Message.OOB);
    if(log.isTraceEnabled())
        log.trace(local_addr + ": sending XMIT_REQ ([" + first_seqno + ", " + last_seqno + "]) to " + dest);
    retransmit_msg.putHeader(this.id, hdr);
    // record the request time of every seqno in the range, so missingMessageReceived()
    // can compute how long the retransmission took
    ConcurrentMap<Long,Long> tmp=xmit_stats.get(sender);
    if(tmp == null) {
        tmp=new ConcurrentHashMap<Long,Long>();
        ConcurrentMap<Long,Long> tmp2=xmit_stats.putIfAbsent(sender, tmp);
        if(tmp2 != null)
            tmp=tmp2;
    }
    // fixed off-by-one: last_seqno is inclusive (the counters below use
    // last_seqno - first_seqno + 1, and handleXmitReq iterates i <= last_seqno),
    // so the timestamp for last_seqno itself must be recorded too
    for(long seq=first_seqno; seq <= last_seqno; seq++) {
        tmp.putIfAbsent(seq, System.currentTimeMillis());
    }
    if(xmit_time_stats != null) {
        // per-second stats bucket, keyed by seconds since xmit_time_stats_start
        long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
        XmitTimeStat stat=xmit_time_stats.get(key);
        if(stat == null) {
            stat=new XmitTimeStat();
            XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
            if(stat2 != null)
                stat=stat2;
        }
        stat.xmit_reqs_sent.addAndGet((int)(last_seqno - first_seqno +1));
    }
    down_prot.down(new Event(Event.MSG, retransmit_msg));
    if(stats) {
        xmit_reqs_sent+=last_seqno - first_seqno +1;
        updateStats(sent, sender, 1, 0, 0);
    }
    xmit_history.add(sender + ": " + first_seqno + "-" + last_seqno);
}
/**
 * Callback invoked when a previously missing message (seqno, from original_sender) has been
 * received. Updates the per-sender retransmission-time history, the smoothed average
 * retransmission time, the per-second stats bucket and the global counters.
 */
public void missingMessageReceived(long seqno, final Address original_sender) {
    ConcurrentMap<Long,Long> tmp=xmit_stats.get(original_sender);
    if(tmp != null) {
        // timestamp was recorded by retransmit() when the XMIT request was sent
        Long timestamp=tmp.remove(seqno);
        if(timestamp != null) {
            long diff=System.currentTimeMillis() - timestamp;
            BoundedList<Long> list=xmit_times_history.get(original_sender);
            if(list == null) {
                list=new BoundedList<Long>(xmit_history_max_size);
                BoundedList<Long> list2=xmit_times_history.putIfAbsent(original_sender, list);
                if(list2 != null)
                    list=list2;
            }
            list.add(diff);
            // compute the smoothed average for retransmission times for original_sender
            // needs to be synchronized because we rely on the previous value for computation of the next value
            synchronized(smoothed_avg_xmit_times) {
                Double smoothed_avg=smoothed_avg_xmit_times.get(original_sender);
                if(smoothed_avg == null)
                    smoothed_avg=INITIAL_SMOOTHED_AVG;
                // the smoothed avg takes 90% of the previous value, 100% of the new value and averages them
                // then, we add 10% to be on the safe side (an xmit value should rather err on the higher than lower side)
                smoothed_avg=((smoothed_avg * WEIGHT) + diff) / 2;
                smoothed_avg=smoothed_avg * (2 - WEIGHT);
                smoothed_avg_xmit_times.put(original_sender, smoothed_avg);
            }
        }
    }
    // per-second stats bucket, keyed by seconds since xmit_time_stats_start
    if(xmit_time_stats != null) {
        long key=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
        XmitTimeStat stat=xmit_time_stats.get(key);
        if(stat == null) {
            stat=new XmitTimeStat();
            XmitTimeStat stat2=xmit_time_stats.putIfAbsent(key, stat);
            if(stat2 != null)
                stat=stat2;
        }
        stat.missing_msgs_received.incrementAndGet();
    }
    if(stats) {
        missing_msgs_received++;
        updateStats(received, original_sender, 0, 0, 1);
    }
}
/** Called when a message gap is detected */
/** Callback invoked when a gap [from .. to] in the seqno stream of src is detected;
 *  accounts for the gap size in the per-second xmit time stats, if enabled. */
public void messageGapDetected(long from, long to, Address src) {
    if(xmit_time_stats == null)
        return;
    long bucket=(System.currentTimeMillis() - xmit_time_stats_start) / 1000;
    XmitTimeStat cur=xmit_time_stats.get(bucket);
    if(cur == null) {
        XmitTimeStat created=new XmitTimeStat();
        XmitTimeStat existing=xmit_time_stats.putIfAbsent(bucket, created);
        cur=existing != null? existing : created;
    }
    cur.gaps_detected.addAndGet((int)(to - from +1));
}
/** Resets the sender seqno to 0 (under seqno_lock), destroys every receiver window and
 *  clears the retransmission table and undelivered-message counter. */
private void reset() {
    seqno_lock.lock();
    try {
        seqno=0;
    }
    finally {
        seqno_lock.unlock();
    }
    for(NakReceiverWindow window: xmit_table.values())
        window.destroy();
    xmit_table.clear();
    undelivered_msgs.set(0);
}
@ManagedOperation(description="TODO")
public String printMessages() {
StringBuilder ret=new StringBuilder(local_addr + ":\n");
for(Map.Entry<Address,NakReceiverWindow> entry: xmit_table.entrySet()) {
Address addr=entry.getKey();
NakReceiverWindow win=entry.getValue();
ret.append(addr).append(": ").append(win.toString()).append('\n');
}
return ret.toString();
}
@ManagedOperation(description="TODO")
public String printRetransmissionAvgs() {
StringBuilder sb=new StringBuilder();
for(Map.Entry<Address,BoundedList<Long>> entry: xmit_times_history.entrySet()) {
Address sender=entry.getKey();
BoundedList<Long> list=entry.getValue();
long tmp=0;
int i=0;
for(Long val: list) {
tmp+=val;
i++;
}
double avg=i > 0? tmp / i: -1;
sb.append(sender).append(": ").append(avg).append("\n");
}
return sb.toString();
}
@ManagedOperation(description="TODO")
public String printSmoothedRetransmissionAvgs() {
StringBuilder sb=new StringBuilder();
for(Map.Entry<Address,Double> entry: smoothed_avg_xmit_times.entrySet()) {
sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
}
return sb.toString();
}
@ManagedOperation(description="TODO")
public String printRetransmissionTimes() {
StringBuilder sb=new StringBuilder();
for(Map.Entry<Address,BoundedList<Long>> entry: xmit_times_history.entrySet()) {
Address sender=entry.getKey();
BoundedList<Long> list=entry.getValue();
sb.append(sender).append(": ").append(list).append("\n");
}
return sb.toString();
}
@ManagedOperation(description="Prints the last N retransmission requests")
public String printXmitHistory() {
StringBuilder sb=new StringBuilder();
for(String req: xmit_history)
sb.append(req).append("\n");
return sb.toString();
}
@ManagedAttribute
public double getTotalAverageRetransmissionTime() {
long total=0;
int i=0;
for(BoundedList<Long> list: xmit_times_history.values()) {
for(Long val: list) {
total+=val;
i++;
}
}
return i > 0? total / i: -1;
}
@ManagedAttribute
public double getTotalAverageSmoothedRetransmissionTime() {
double total=0.0;
int cnt=0;
synchronized(smoothed_avg_xmit_times) {
for(Double val: smoothed_avg_xmit_times.values()) {
if(val != null) {
total+=val;
cnt++;
}
}
}
return cnt > 0? total / cnt : -1;
}
/** Returns the smoothed average retransmission time for a given sender */
public double getSmoothedAverageRetransmissionTime(Address sender) {
synchronized(smoothed_avg_xmit_times) {
Double retval=smoothed_avg_xmit_times.get(sender);
if(retval == null) {
retval=INITIAL_SMOOTHED_AVG;
smoothed_avg_xmit_times.put(sender, retval);
}
return retval;
}
}
// ProbeHandler interface
public Map<String, String> handleProbe(String... keys) {
Map<String,String> retval=new HashMap<String,String>();
for(String key: keys) {
if(key.equals("digest-history"))
retval.put(key, printDigestHistory());
if(key.equals("dump-digest"))
retval.put(key, "\n" + printMessages());
}
return retval;
}
// ProbeHandler interface
public String[] supportedKeys() {
return new String[]{"digest-history", "dump-digest"};
}
// public static final class LossRate {
// private final Set<Long> received=new HashSet<Long>();
// private final Set<Long> missing=new HashSet<Long>();
// private double smoothed_loss_rate=0.0;
// public synchronized void addReceived(long seqno) {
// received.add(seqno);
// missing.remove(seqno);
// setSmoothedLossRate();
// public synchronized void addReceived(Long ... seqnos) {
// for(int i=0; i < seqnos.length; i++) {
// Long seqno=seqnos[i];
// received.add(seqno);
// missing.remove(seqno);
// setSmoothedLossRate();
// public synchronized void addMissing(long from, long to) {
// for(long i=from; i <= to; i++) {
// if(!received.contains(i))
// missing.add(i);
// setSmoothedLossRate();
// public synchronized double computeLossRate() {
// int num_missing=missing.size();
// if(num_missing == 0)
// return 0.0;
// int num_received=received.size();
// int total=num_missing + num_received;
// return num_missing / (double)total;
// public synchronized double getSmoothedLossRate() {
// return smoothed_loss_rate;
// public synchronized String toString() {
// StringBuilder sb=new StringBuilder();
// int num_missing=missing.size();
// int num_received=received.size();
// int total=num_missing + num_received;
// sb.append("total=").append(total).append(" (received=").append(received.size()).append(", missing=")
// .append(missing.size()).append(", loss rate=").append(computeLossRate())
// .append(", smoothed loss rate=").append(smoothed_loss_rate).append(")");
// return sb.toString();
// /** Set the new smoothed_loss_rate value to 70% of the new value and 30% of the old value */
// private void setSmoothedLossRate() {
// double new_loss_rate=computeLossRate();
// if(smoothed_loss_rate == 0) {
// smoothed_loss_rate=new_loss_rate;
// else {
// smoothed_loss_rate=smoothed_loss_rate * .3 + new_loss_rate * .7;
/** Per-second bucket of retransmission-related counters; atomics allow lock-free updates
 *  from concurrent callers (see retransmit(), missingMessageReceived(), messageGapDetected()). */
private static class XmitTimeStat {
    final AtomicInteger gaps_detected=new AtomicInteger(0);
    final AtomicInteger xmit_reqs_sent=new AtomicInteger(0);
    final AtomicInteger xmit_reqs_received=new AtomicInteger(0);
    final AtomicInteger xmit_rsps_sent=new AtomicInteger(0);
    final AtomicInteger xmit_rsps_received=new AtomicInteger(0);
    final AtomicInteger missing_msgs_received=new AtomicInteger(0);
}
/** Retransmission interval whose next timeout adapts to the smoothed average
 *  retransmission time observed for a given sender. */
private class ActualInterval implements Interval {
    private final Address sender;
    public ActualInterval(Address sender) {
        this.sender=sender;
    }
    // next timeout == current smoothed average xmit time for the sender (truncated to millis)
    public long next() {
        return (long)getSmoothedAverageRetransmissionTime(sender);
    }
    // immutable, so the same instance can be shared instead of cloned
    public Interval copy() {
        return this;
    }
}
/** Simple per-peer counters for XMIT requests, XMIT responses and missing messages received. */
static class StatsEntry {
    long xmit_reqs, xmit_rsps, missing_msgs_rcvd;
    public String toString() {
        return xmit_reqs + " xmit_reqs" + ", " + xmit_rsps + " xmit_rsps"
                + ", " + missing_msgs_rcvd + " missing msgs";
    }
}
}
|
// $Id: NAKACK.java,v 1.77 2006/05/22 09:42:20 belaban Exp $
package org.jgroups.protocols.pbcast;
import org.jgroups.*;
import org.jgroups.stack.NakReceiverWindow;
import org.jgroups.stack.Protocol;
import org.jgroups.stack.Retransmitter;
import org.jgroups.util.*;
import java.io.IOException;
import java.util.*;
/**
* Negative AcKnowledgement layer (NAKs). Messages are assigned a monotonically increasing sequence number (seqno).
* Receivers deliver messages ordered according to seqno and request retransmission of missing messages. Retransmitted
* messages are bundled into bigger ones, e.g. when getting an xmit request for messages 1-10, instead of sending 10
* unicast messages, we bundle all 10 messages into 1 and send it. However, since this protocol typically sits below
* FRAG, we cannot count on FRAG to fragement/defragment the (possibly) large message into smaller ones. Therefore we
* only bundle messages up to max_xmit_size bytes to prevent too large messages. For example, if the bundled message
* size was a total of 34000 bytes, and max_xmit_size=16000, we'd send 3 messages: 2 16K and a 2K message. <em>Note that
* max_xmit_size should be the same value as FRAG.frag_size (or smaller).</em><br/> Retransmit requests are always sent
* to the sender. If the sender dies, and not everyone has received its messages, they will be lost. In the future, this
* may be changed to have receivers store all messages, so that retransmit requests can be answered by any member.
* Trivial to implement, but not done yet. For most apps, the default retransmit properties are sufficient, if not use
* vsync.
*
* @author Bela Ban
*/
public class NAKACK extends Protocol implements Retransmitter.RetransmitCommand, NakReceiverWindow.Listener {
private long[] retransmit_timeout={600, 1200, 2400, 4800}; // time(s) to wait before requesting retransmission
private boolean is_server=false; // true once we have joined (see BECOME_SERVER / VIEW_CHANGE handling)
private Address local_addr=null; // our own address, set via SET_LOCAL_ADDRESS
private final Vector members=new Vector(11); // current group membership
private View view; // last installed view
private long seqno=-1; // current message sequence number (starts with 0)
private long max_xmit_size=8192; // max size of a retransmit message (otherwise send multiple)
private int gc_lag=20; // number of msgs garbage collection lags behind
/**
 * Retransmit messages using multicast rather than unicast. This has the advantage that, if many receivers lost a
 * message, the sender only retransmits once.
 */
private boolean use_mcast_xmit=true;
/**
 * Ask a random member for retransmission of a missing message. If set to true, discard_delivered_msgs will be
 * set to false
 */
private boolean xmit_from_random_member=false;
/**
 * Messages that have been received in order are sent up the stack (= delivered to the application). Delivered
 * messages are removed from NakReceiverWindow.received_msgs and moved to NakReceiverWindow.delivered_msgs, where
 * they are later garbage collected (by STABLE). Since we do retransmits only from sent messages, never
 * received or delivered messages, we can turn the moving to delivered_msgs off, so we don't keep the message
 * around, and don't need to wait for garbage collection to remove them.
 */
private boolean discard_delivered_msgs=false;
/** If value is > 0, the retransmit buffer is bounded: only the max_xmit_buf_size latest messages are kept,
 * older ones are discarded when the buffer size is exceeded. A value <= 0 means unbounded buffers
 */
private int max_xmit_buf_size=0;
/**
 * Hashtable<Address,NakReceiverWindow>. Stores received messages (keyed by sender). Note that this is no long term
 * storage; messages are just stored until they can be delivered (ie., until the correct FIFO order is established)
 */
private final HashMap received_msgs=new HashMap(11);
/** TreeMap<Long,Message>. Map of messages sent by me (keyed and sorted on sequence number) */
private final TreeMap sent_msgs=new TreeMap();
private boolean leaving=false; // set on DISCONNECT; incoming msgs are then silently dropped
private boolean started=false; // set in start()/stop(); send() refuses messages while false
private TimeScheduler timer=null; // shared timer from the protocol stack (set in start())
private static final String name="NAKACK"; // protocol name, also used as the header key
// cumulative counters, maintained only when 'stats' is enabled
private long xmit_reqs_received;
private long xmit_reqs_sent;
private long xmit_rsps_received;
private long xmit_rsps_sent;
private long missing_msgs_received;
/** Captures stats on XMIT_REQS, XMIT_RSPS per sender */
private HashMap sent=new HashMap();
/** Captures stats on XMIT_REQS, XMIT_RSPS per receiver */
private HashMap received=new HashMap();
private int stats_list_size=20;
/** BoundedList<XmitRequest>. Keeps track of the last stats_list_size XMIT requests */
private BoundedList receive_history;
/** BoundedList<MissingMessage>. Keeps track of the last stats_list_size missing messages received */
private BoundedList send_history;
// default constructor; all configuration happens via setProperties()
public NAKACK() {
}
// protocol name, used to key headers and identify this layer in the stack
public String getName() {
    return name;
}
// JMX accessors for the cumulative retransmission counters (maintained when 'stats' is on)
public long getXmitRequestsReceived() {return xmit_reqs_received;}
public long getXmitRequestsSent() {return xmit_reqs_sent;}
public long getXmitResponsesReceived() {return xmit_rsps_received;}
public long getXmitResponsesSent() {return xmit_rsps_sent;}
public long getMissingMessagesReceived() {return missing_msgs_received;}
/** Sums the sizes of all receiver windows, i.e. the number of messages still awaiting delivery. */
public int getPendingRetransmissionRequests() {
    int total=0;
    synchronized(received_msgs) {
        for(Iterator it=received_msgs.values().iterator(); it.hasNext();) {
            NakReceiverWindow win=(NakReceiverWindow)it.next();
            total+=win.size();
        }
    }
    return total;
}
/** Number of messages currently kept in the sent-messages (retransmit) table. */
public int getSentTableSize() {
    synchronized(sent_msgs) {
        return sent_msgs.size();
    }
}
/** Sums the sizes of the (deduplicated) receiver windows in received_msgs. */
public int getReceivedTableSize() {
    int total=0;
    Set windows=new LinkedHashSet(received_msgs.values());
    for(Iterator it=windows.iterator(); it.hasNext();)
        total+=((NakReceiverWindow)it.next()).size();
    return total;
}
/** Resets all cumulative counters and clears the per-peer and history stats structures. */
public void resetStats() {
    xmit_reqs_received=0;
    xmit_reqs_sent=0;
    xmit_rsps_received=0;
    xmit_rsps_sent=0;
    missing_msgs_received=0;
    sent.clear();
    received.clear();
    // histories exist only when stats were enabled at init() time
    if(receive_history != null)
        receive_history.removeAll();
    if(send_history != null)
        send_history.removeAll();
}
/** Allocates the bounded history lists, but only when statistics collection is enabled. */
public void init() throws Exception {
    if(!stats)
        return;
    send_history=new BoundedList(stats_list_size);
    receive_history=new BoundedList(stats_list_size);
}
// plain accessors for the configurable properties (also settable via setProperties())
public int getGcLag() {
    return gc_lag;
}
public void setGcLag(int gc_lag) {
    this.gc_lag=gc_lag;
}
public boolean isUseMcastXmit() {
    return use_mcast_xmit;
}
public void setUseMcastXmit(boolean use_mcast_xmit) {
    this.use_mcast_xmit=use_mcast_xmit;
}
public boolean isXmitFromRandomMember() {
    return xmit_from_random_member;
}
public void setXmitFromRandomMember(boolean xmit_from_random_member) {
    this.xmit_from_random_member=xmit_from_random_member;
}
public boolean isDiscardDeliveredMsgs() {
    return discard_delivered_msgs;
}
public void setDiscardDeliveredMsgs(boolean discard_delivered_msgs) {
    this.discard_delivered_msgs=discard_delivered_msgs;
}
public int getMaxXmitBufSize() {
    return max_xmit_buf_size;
}
public void setMaxXmitBufSize(int max_xmit_buf_size) {
    this.max_xmit_buf_size=max_xmit_buf_size;
}
public long getMaxXmitSize() {
    return max_xmit_size;
}
public void setMaxXmitSize(long max_xmit_size) {
    this.max_xmit_size=max_xmit_size;
}
/**
 * Parses the protocol's configuration properties. Each recognized key is removed from
 * {@code props}; any key left over afterwards is reported as unrecognized and causes a
 * {@code false} return.
 */
public boolean setProperties(Properties props) {
    String str;
    long[] tmp;
    super.setProperties(props);
    str=props.getProperty("retransmit_timeout");
    if(str != null) {
        tmp=Util.parseCommaDelimitedLongs(str);
        props.remove("retransmit_timeout");
        if(tmp != null && tmp.length > 0) {
            retransmit_timeout=tmp;
        }
    }
    str=props.getProperty("gc_lag");
    if(str != null) {
        gc_lag=Integer.parseInt(str);
        if(gc_lag < 0) {
            log.error("NAKACK.setProperties(): gc_lag cannot be negative, setting it to 0");
            gc_lag=0; // fixed: previously only logged — the clamp promised by the message never happened
        }
        props.remove("gc_lag");
    }
    str=props.getProperty("max_xmit_size");
    if(str != null) {
        max_xmit_size=Long.parseLong(str);
        props.remove("max_xmit_size");
    }
    str=props.getProperty("use_mcast_xmit");
    if(str != null) {
        use_mcast_xmit=Boolean.valueOf(str).booleanValue();
        props.remove("use_mcast_xmit");
    }
    str=props.getProperty("discard_delivered_msgs");
    if(str != null) {
        discard_delivered_msgs=Boolean.valueOf(str).booleanValue();
        props.remove("discard_delivered_msgs");
    }
    str=props.getProperty("xmit_from_random_member");
    if(str != null) {
        xmit_from_random_member=Boolean.valueOf(str).booleanValue();
        props.remove("xmit_from_random_member");
    }
    str=props.getProperty("max_xmit_buf_size");
    if(str != null) {
        max_xmit_buf_size=Integer.parseInt(str);
        props.remove("max_xmit_buf_size");
    }
    str=props.getProperty("stats_list_size");
    if(str != null) {
        stats_list_size=Integer.parseInt(str);
        props.remove("stats_list_size");
    }
    // retransmitting from a random member requires keeping delivered msgs around
    if(xmit_from_random_member) {
        if(discard_delivered_msgs) {
            discard_delivered_msgs=false;
            log.warn("xmit_from_random_member set to true: changed discard_delivered_msgs to false");
        }
    }
    if(props.size() > 0) {
        log.error("NAKACK.setProperties(): these properties are not recognized: " + props);
        return false;
    }
    return true;
}
/** Returns the superclass stats augmented with this protocol's counters and table dumps. */
public Map dumpStats() {
    Map m=super.dumpStats();
    if(m == null)
        m=new HashMap();
    m.put("xmit_reqs_received", new Long(xmit_reqs_received));
    m.put("xmit_reqs_sent", new Long(xmit_reqs_sent));
    m.put("xmit_rsps_received", new Long(xmit_rsps_received));
    m.put("xmit_rsps_sent", new Long(xmit_rsps_sent));
    m.put("missing_msgs_received", new Long(missing_msgs_received));
    m.put("sent_msgs", printSentMsgs());
    // dump each sender's receiver window as "<address>: <window>"
    StringBuffer sb=new StringBuffer();
    synchronized(received_msgs) {
        for(Iterator it=received_msgs.entrySet().iterator(); it.hasNext();) {
            Map.Entry e=(Map.Entry)it.next();
            sb.append(e.getKey()).append(": ").append(e.getValue().toString()).append('\n');
        }
    }
    m.put("received_msgs", sb.toString());
    return m;
}
/**
 * Renders the per-peer stats maps and the XMIT/missing-message histories as text.
 * The history lists are only allocated in init() when stats are enabled, so they are
 * null-checked here (previously this threw a NullPointerException when stats were off).
 */
public String printStats() {
    StringBuffer sb=new StringBuffer();
    sb.append("sent:\n");
    for(Iterator it=sent.entrySet().iterator(); it.hasNext();) {
        Map.Entry entry=(Map.Entry)it.next();
        Object key=entry.getKey();
        if(key == null) key="<mcast dest>"; // null key means the multicast destination
        sb.append(key).append(": ").append(entry.getValue()).append("\n");
    }
    sb.append("\nreceived:\n");
    for(Iterator it=received.entrySet().iterator(); it.hasNext();) {
        Map.Entry entry=(Map.Entry)it.next();
        sb.append(entry.getKey()).append(": ").append(entry.getValue()).append("\n");
    }
    sb.append("\nXMIT_REQS sent:\n");
    if(send_history != null) { // null when stats are disabled (see init())
        for(Enumeration en=send_history.elements(); en.hasMoreElements();) {
            XmitRequest tmp=(XmitRequest)en.nextElement();
            sb.append(tmp).append("\n");
        }
    }
    sb.append("\nMissing messages received\n");
    if(receive_history != null) { // null when stats are disabled (see init())
        for(Enumeration en=receive_history.elements(); en.hasMoreElements();) {
            MissingMessage missing=(MissingMessage)en.nextElement();
            sb.append(missing).append("\n");
        }
    }
    return sb.toString();
}
/** Events this protocol can handle on behalf of layers above it. */
public Vector providedUpServices() {
    Vector services=new Vector(5);
    services.addElement(new Integer(Event.GET_DIGEST));
    services.addElement(new Integer(Event.GET_DIGEST_STABLE));
    services.addElement(new Integer(Event.GET_DIGEST_STATE));
    services.addElement(new Integer(Event.SET_DIGEST));
    services.addElement(new Integer(Event.MERGE_DIGEST));
    return services;
}
/** Events this protocol can handle on behalf of layers below it. */
public Vector providedDownServices() {
    Vector services=new Vector(2);
    services.addElement(new Integer(Event.GET_DIGEST));
    services.addElement(new Integer(Event.GET_DIGEST_STABLE));
    return services;
}
/** Grabs the shared timer from the stack and marks the protocol as started. */
public void start() throws Exception {
    if(stack != null)
        timer=stack.timer;
    else
        timer=null;
    if(timer == null)
        throw new Exception("timer is null");
    started=true;
}
public void stop() {
    // once started == false, send() refuses any further messages
    started=false;
    reset(); // clears sent_msgs and destroys all NakReceiverWindows
}
/**
* <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>passDown()</code> in this
* method as the event is passed down by default by the superclass after this method returns !</b>
*/
// Routes down-going events: multicast MSG events are sequenced via send(); digest-related
// events are answered directly; view changes update membership state. Cases ending in
// 'return' consume the event; cases ending in 'break' fall through to passDown(evt).
public void down(Event evt) {
    Digest digest;
    Vector mbrs;
    switch(evt.getType()) {
        case Event.MSG:
            Message msg=(Message)evt.getArg();
            Address dest=msg.getDest();
            if(dest != null && !dest.isMulticastAddress()) {
                break; // unicast address: not null and not mcast, pass down unchanged
            }
            send(evt, msg);
            return; // don't pass down the stack
        case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
            stable((Digest)evt.getArg());
            return; // do not pass down further (Bela Aug 7 2001)
        case Event.GET_DIGEST:
            digest=getDigest();
            passUp(new Event(Event.GET_DIGEST_OK, digest != null ? digest.copy() : null));
            return;
        case Event.GET_DIGEST_STABLE:
            digest=getDigestHighestDeliveredMsgs();
            passUp(new Event(Event.GET_DIGEST_STABLE_OK, digest != null ? digest.copy() : null));
            return;
        case Event.GET_DIGEST_STATE:
            digest=getDigest();
            passUp(new Event(Event.GET_DIGEST_STATE_OK, digest != null ? digest.copy() : null));
            return;
        case Event.SET_DIGEST:
            setDigest((Digest)evt.getArg());
            return;
        case Event.MERGE_DIGEST:
            mergeDigest((Digest)evt.getArg());
            return;
        case Event.CONFIG:
            passDown(evt);
            if(log.isDebugEnabled()) {
                log.debug("received CONFIG event: " + evt.getArg());
            }
            handleConfigEvent((HashMap)evt.getArg());
            return;
        case Event.TMP_VIEW:
            View tmp_view=(View)evt.getArg();
            mbrs=tmp_view.getMembers();
            members.clear();
            members.addAll(mbrs);
            adjustReceivers();
            break;
        case Event.VIEW_CHANGE:
            tmp_view=(View)evt.getArg();
            mbrs=tmp_view.getMembers();
            members.clear();
            members.addAll(mbrs);
            adjustReceivers();
            is_server=true; // check vids from now on
            // drop stats entries for members no longer in the view
            Set tmp=new LinkedHashSet(members);
            tmp.add(null); // for null destination (= mcast)
            sent.keySet().retainAll(tmp);
            received.keySet().retainAll(tmp);
            view=tmp_view;
            break;
        case Event.BECOME_SERVER:
            is_server=true;
            break;
        case Event.DISCONNECT:
            leaving=true;
            reset();
            break;
    }
    passDown(evt);
}
/**
* <b>Callback</b>. Called by superclass when event may be handled.<p> <b>Do not use <code>PassUp</code> in this
* method as the event is passed up by default by the superclass after this method returns !</b>
*/
// Routes up-coming events: messages carrying a NakAckHeader are dispatched by header type
// (MSG, XMIT_REQ, XMIT_RSP); digest events are answered; everything else is passed up.
public void up(Event evt) {
    NakAckHeader hdr;
    Message msg;
    Digest digest;
    switch(evt.getType()) {
        case Event.MSG:
            msg=(Message)evt.getArg();
            hdr=(NakAckHeader)msg.getHeader(name);
            if(hdr == null)
                break; // pass up (e.g. unicast msg)
            // discard messages while not yet server (i.e., until JOIN has returned)
            if(!is_server) {
                if(trace)
                    log.trace("message was discarded (not yet server)");
                return;
            }
            // Changed by bela Jan 29 2003: we must not remove the header, otherwise
            // further xmit requests will fail !
            //hdr=(NakAckHeader)msg.removeHeader(getName());
            switch(hdr.type) {
                case NakAckHeader.MSG:
                    handleMessage(msg, hdr);
                    return; // transmitter passes message up for us !
                case NakAckHeader.XMIT_REQ:
                    if(hdr.range == null) {
                        if(log.isErrorEnabled()) {
                            log.error("XMIT_REQ: range of xmit msg is null; discarding request from " + msg.getSrc());
                        }
                        return;
                    }
                    handleXmitReq(msg.getSrc(), hdr.range.low, hdr.range.high, hdr.sender);
                    return;
                case NakAckHeader.XMIT_RSP:
                    if(trace)
                        log.trace("received missing messages " + hdr.range);
                    handleXmitRsp(msg);
                    return;
                default:
                    if(log.isErrorEnabled()) {
                        log.error("NakAck header type " + hdr.type + " not known !");
                    }
                    return;
            }
        case Event.STABLE: // generated by STABLE layer. Delete stable messages passed in arg
            stable((Digest)evt.getArg());
            return; // do not pass up further (Bela Aug 7 2001)
        case Event.GET_DIGEST:
            digest=getDigestHighestDeliveredMsgs();
            passDown(new Event(Event.GET_DIGEST_OK, digest));
            return;
        case Event.GET_DIGEST_STABLE:
            digest=getDigestHighestDeliveredMsgs();
            passDown(new Event(Event.GET_DIGEST_STABLE_OK, digest));
            return;
        case Event.SET_LOCAL_ADDRESS:
            local_addr=(Address)evt.getArg();
            break;
        case Event.CONFIG:
            passUp(evt);
            if(log.isDebugEnabled()) {
                log.debug("received CONFIG event: " + evt.getArg());
            }
            handleConfigEvent((HashMap)evt.getArg());
            return;
    }
    passUp(evt);
}
/**
 * Assigns the next seqno to a multicast message, stores it in sent_msgs for later
 * retransmission, and passes it down the stack. The seqno increment and the insertion
 * into sent_msgs happen atomically under the sent_msgs lock.
 */
private void send(Event evt, Message msg) {
    if(msg == null)
        throw new NullPointerException("msg is null; event is " + evt);
    if(!started) {
        if(warn)
            log.warn("discarded message as start() has not yet been called, message: " + msg);
        return;
    }
    synchronized(sent_msgs) {
        long msg_id;
        try { // incrementing seqno and adding the msg to sent_msgs needs to be atomic
            msg_id=seqno +1;
            msg.putHeader(name, new NakAckHeader(NakAckHeader.MSG, msg_id));
            if(Global.copy) {
                sent_msgs.put(new Long(msg_id), msg.copy());
            }
            else {
                sent_msgs.put(new Long(msg_id), msg);
            }
            // seqno is only advanced after the message is safely in sent_msgs
            seqno=msg_id;
        }
        catch(Throwable t) {
            // rethrow Errors and RuntimeExceptions as-is; wrap anything else
            if(t instanceof Error)
                throw (Error)t;
            if(t instanceof RuntimeException)
                throw (RuntimeException)t;
            else {
                throw new RuntimeException("failure adding msg " + msg + " to the retransmit table", t);
            }
        }
        try {
            if(trace)
                log.trace(local_addr + ": sending msg #" + msg_id);
            passDown(evt); // if this fails, since msg is in sent_msgs, it can be retransmitted
        }
        catch(Throwable t) { // eat the exception, don't pass it up the stack
            if(warn) {
                log.warn("failure passing message down", t);
            }
        }
    }
}
/**
* Finds the corresponding NakReceiverWindow and adds the message to it (according to seqno). Then removes as many
* messages as possible from the NRW and passes them up the stack. Discards messages from non-members.
*/
/**
 * Finds the corresponding NakReceiverWindow for the message's sender, adds the message to it
 * (ordered by seqno), then removes and passes up as many in-order messages as possible.
 * Messages from unknown (non-member) senders are discarded.
 */
private void handleMessage(Message msg, NakAckHeader hdr) {
    NakReceiverWindow win;
    Message msg_to_deliver;
    Address sender=msg.getSrc();
    if(sender == null) {
        if(log.isErrorEnabled())
            log.error("sender of message is null");
        return;
    }
    if(trace) {
        // fixed: new StringBuffer('[') invoked the StringBuffer(int capacity) constructor
        // (char widened to int), silently dropping the '[' from the trace output
        StringBuffer sb=new StringBuffer("[");
        sb.append(local_addr).append(": received ").append(sender).append('#').append(hdr.seqno);
        log.trace(sb.toString());
    }
    // msg is potentially re-sent later as result of XMIT_REQ reception; that's why hdr is added !
    // Changed by bela Jan 29 2003: we currently don't resend from received msgs, just from sent_msgs !
    // msg.putHeader(getName(), hdr);
    synchronized(received_msgs) {
        win=(NakReceiverWindow)received_msgs.get(sender);
    }
    if(win == null) { // discard message if there is no entry for sender
        if(leaving)
            return;
        if(warn) {
            // same StringBuffer(char) capacity-constructor bug fixed here
            StringBuffer sb=new StringBuffer("[");
            sb.append(local_addr).append("] discarded message from non-member ")
                    .append(sender).append(", my view is " ).append(this.view);
            log.warn(sb);
        }
        return;
    }
    win.add(hdr.seqno, msg); // add in order, then remove and pass up as many msgs as possible
    // where lots of threads can come up to this point concurrently, but only 1 is allowed to pass at a time
    // We *can* deliver messages from *different* senders concurrently, e.g. reception of P1, Q1, P2, Q2 can result in
    // delivery of P1, Q1, Q2, P2: FIFO (implemented by NAKACK) says messages need to be delivered only in the
    // order in which they were sent by the sender
    synchronized(win) {
        while((msg_to_deliver=win.remove()) != null) {
            // Changed by bela Jan 29 2003: not needed (see above)
            //msg_to_deliver.removeHeader(getName());
            passUp(new Event(Event.MSG, msg_to_deliver));
        }
    }
}
/**
* Retransmit from sent-table, called when XMIT_REQ is received. Bundles all messages to be xmitted into one large
* message and sends them back with an XMIT_RSP header. Note that since we cannot count on a fragmentation layer
* below us, we have to make sure the message doesn't exceed max_xmit_size bytes. If this is the case, we split the
* message into multiple, smaller-chunked messages. But in most cases this still yields fewer messages than if each
* requested message was retransmitted separately.
*
* @param xmit_requester The sender of the XMIT_REQ, we have to send the requested copy of the message to this address
* @param first_seqno The first sequence number to be retransmitted (<= last_seqno)
* @param last_seqno The last sequence number to be retransmitted (>= first_seqno)
* @param original_sender The member who originally sent the messsage. Guaranteed to be non-null
*/
private void handleXmitReq(Address xmit_requester, long first_seqno, long last_seqno, Address original_sender) {
    Message m, tmp;
    LinkedList list;
    // size: bytes accumulated in the current bundle; marker: first seqno of the current bundle
    long size=0, marker=first_seqno, len;
    NakReceiverWindow win=null;
    boolean amISender; // am I the original sender ?
    if(trace) {
        StringBuffer sb=new StringBuffer();
        sb.append(local_addr).append(": received xmit request from ").append(xmit_requester).append(" for ");
        sb.append(original_sender).append(" [").append(first_seqno).append(" - ").append(last_seqno).append("]");
        log.trace(sb.toString());
    }
    if(first_seqno > last_seqno) {
        if(log.isErrorEnabled())
            log.error("first_seqno (" + first_seqno + ") > last_seqno (" + last_seqno + "): not able to retransmit");
        return;
    }
    if(stats) {
        xmit_reqs_received+=last_seqno - first_seqno +1;
        updateStats(received, xmit_requester, 1, 0, 0);
    }
    // our own messages come from sent_msgs; other senders' messages from their receiver window
    amISender=local_addr.equals(original_sender);
    if(!amISender)
        win=(NakReceiverWindow)received_msgs.get(original_sender);
    list=new LinkedList();
    for(long i=first_seqno; i <= last_seqno; i++) {
        if(amISender) {
            m=(Message)sent_msgs.get(new Long(i)); // no need to synchronize
        }
        else {
            m=win != null? win.get(i) : null;
        }
        if(m == null) {
            // requested message is gone (e.g. already garbage collected): log and skip it
            if(log.isErrorEnabled()) {
                StringBuffer sb=new StringBuffer();
                sb.append("(requester=").append(xmit_requester).append(", local_addr=").append(this.local_addr);
                sb.append(") message ").append(original_sender).append("::").append(i);
                sb.append(" not found in ").append((amISender? "sent" : "received")).append(" msgs. ");
                if(win != null) {
                    sb.append("Received messages from ").append(original_sender).append(": ").append(win.toString());
                }
                else {
                    sb.append("\nSent messages: ").append(printSentMsgs());
                }
                log.error(sb);
            }
            continue;
        }
        len=m.size();
        size+=len;
        if(size > max_xmit_size && list.size() > 0) { // changed from >= to > (yaron-r, bug #943709)
            // yaronr: added &&listSize()>0 since protocols between FRAG and NAKACK add headers, and message exceeds size.
            // size has reached max_xmit_size. go ahead and send message (excluding the current message)
            if(trace)
                log.trace("xmitting msgs [" + marker + '-' + (i - 1) + "] to " + xmit_requester);
            sendXmitRsp(xmit_requester, (LinkedList)list.clone(), marker, i - 1);
            marker=i;
            list.clear();
            // fixed Dec 15 2003 (bela, patch from Joel Dice (dicej)), see explanantion under
            // bug report #854887
            size=len;
        }
        if(Global.copy) {
            tmp=m.copy();
        }
        else {
            tmp=m;
        }
        // tmp.setDest(xmit_requester);
        // tmp.setSrc(local_addr);
        if(tmp.getSrc() == null)
            tmp.setSrc(local_addr);
        list.add(tmp);
    }
    // flush the final (possibly only) bundle
    if(list.size() > 0) {
        if(trace)
            log.trace("xmitting msgs [" + marker + '-' + last_seqno + "] to " + xmit_requester);
        sendXmitRsp(xmit_requester, (LinkedList)list.clone(), marker, last_seqno);
        list.clear();
    }
}
/**
 * Accumulates per-member retransmission counters in the given statistics map,
 * creating a fresh {@link Entry} for the member on first use.
 *
 * @param map     map of Address -> Entry (either the sent or received stats map)
 * @param key     the member whose counters are updated (may be null when responses are multicast)
 * @param req     number of XMIT requests to add
 * @param rsp     number of XMIT responses to add
 * @param missing number of missing messages received to add
 */
private static void updateStats(HashMap map, Address key, int req, int rsp, int missing) {
    Entry counters=(Entry)map.get(key);
    if(counters == null) {
        counters=new Entry();
        map.put(key, counters);
    }
    counters.xmit_reqs+=req;
    counters.xmit_rsps+=rsp;
    counters.missing_msgs_rcvd+=missing;
}
/**
 * Marshals the given list of messages into a single XMIT_RSP message and sends it down
 * the stack to {@code dest} (or multicasts it to the whole group if use_mcast_xmit is set).
 *
 * @param dest        the requester to send the response to (ignored when multicasting)
 * @param xmit_list   the retransmitted messages; must be non-null and non-empty
 * @param first_seqno first seqno contained in the batch (carried in the header)
 * @param last_seqno  last seqno contained in the batch (carried in the header)
 */
private void sendXmitRsp(Address dest, LinkedList xmit_list, long first_seqno, long last_seqno) {
    if(xmit_list == null || xmit_list.size() == 0) {
        if(log.isErrorEnabled())
            log.error("xmit_list is empty");
        return;
    }
    if(use_mcast_xmit)
        dest=null; // null destination == multicast to the group
    if(stats) {
        xmit_rsps_sent+=xmit_list.size();
        updateStats(sent, dest, 0, 1, 0);
    }
    try {
        Buffer serialized=Util.msgListToByteBuffer(xmit_list);
        Message rsp=new Message(dest, null, serialized.getBuf(), serialized.getOffset(), serialized.getLength());
        rsp.putHeader(name, new NakAckHeader(NakAckHeader.XMIT_RSP, first_seqno, last_seqno));
        passDown(new Event(Event.MSG, rsp));
    }
    catch(IOException ex) {
        log.error("failed marshalling xmit list", ex);
    }
}
/**
 * Handles an XMIT_RSP: unmarshals the batched retransmitted messages from the payload
 * and feeds each one back up the stack for normal processing.
 *
 * @param msg the XMIT_RSP message whose buffer holds a marshalled message list; may be null
 */
private void handleXmitRsp(Message msg) {
    if(msg == null) {
        if(warn)
            log.warn("message is null");
        return;
    }
    try {
        LinkedList msgs=Util.byteBufferToMessageList(msg.getRawBuffer(), msg.getOffset(), msg.getLength());
        if(msgs == null)
            return;
        if(stats) {
            xmit_rsps_received+=msgs.size();
            updateStats(received, msg.getSrc(), 0, 1, 0);
        }
        // re-inject each retransmitted message as if it had just arrived
        Iterator it=msgs.iterator();
        while(it.hasNext()) {
            up(new Event(Event.MSG, (Message)it.next()));
        }
        msgs.clear();
    }
    catch(Exception ex) {
        if(log.isErrorEnabled()) {
            log.error("failed reading list of retransmitted messages", ex);
        }
    }
}
/**
* Remove old members from NakReceiverWindows and add new members (starting seqno=0). Essentially removes all
* entries from received_msgs that are not in <code>members</code>
*/
private void adjustReceivers() {
    synchronized(received_msgs) {
        // 1. Drop the windows of senders that are no longer members
        Iterator it=received_msgs.keySet().iterator();
        while(it.hasNext()) {
            Address sender=(Address)it.next();
            if(members.contains(sender))
                continue;
            ((NakReceiverWindow)received_msgs.get(sender)).reset();
            if(log.isDebugEnabled()) {
                log.debug("removing " + sender + " from received_msgs (not member anymore)");
            }
            it.remove();
        }
        // 2. Create windows (starting at seqno 0) for members we don't know yet
        for(int i=0; i < members.size(); i++) {
            Address member=(Address)members.elementAt(i);
            if(!received_msgs.containsKey(member)) {
                received_msgs.put(member, createNakReceiverWindow(member, 0));
            }
        }
    }
}
/**
* Returns a message digest: for each member P the highest seqno received from P is added to the digest.
*/
private Digest getDigest() {
    Digest digest=new Digest(members.size());
    for(int i=0; i < members.size(); i++) {
        Address member=(Address)members.elementAt(i);
        // false -> highest seqno *received*, gaps ignored
        Range r=getLowestAndHighestSeqno(member, false);
        if(r == null) {
            if(log.isErrorEnabled()) {
                log.error("range is null");
            }
            continue; // member without a known range is simply left out of the digest
        }
        digest.add(member, r.low, r.high);
    }
    return digest;
}
/**
* Returns a message digest: for each member P the highest seqno received from P <em>without a gap</em> is added to
* the digest. E.g. if the seqnos received from P are [+3 +4 +5 -6 +7 +8], then 5 will be returned. Also, the
* highest seqno <em>seen</em> is added. The max of all highest seqnos seen will be used (in STABLE) to determine
* whether the last seqno from a sender was received (see "Last Message Dropped" topic in DESIGN).
*/
private Digest getDigestHighestDeliveredMsgs() {
    Digest digest=new Digest(members.size());
    for(int i=0; i < members.size(); i++) {
        Address member=(Address)members.elementAt(i);
        // true -> highest *deliverable* seqno (stops at the first gap)
        Range r=getLowestAndHighestSeqno(member, true);
        if(r == null) {
            if(log.isErrorEnabled()) {
                log.error("range is null");
            }
            continue; // skip members without a known range
        }
        // also record the highest seqno *seen*, used by STABLE for "Last Message Dropped"
        long seen=getHighSeqnoSeen(member);
        digest.add(member, r.low, r.high, seen);
    }
    return digest;
}
/**
* Creates a NakReceiverWindow for each sender in the digest according to the sender's seqno. If NRW already exists,
* reset it.
*/
private void setDigest(Digest d) {
    if(d == null || d.senders == null) {
        if(log.isErrorEnabled()) {
            log.error("digest or digest.senders is null");
        }
        return;
    }
    clear(); // discard all existing receiver windows first
    Iterator it=d.senders.entrySet().iterator();
    while(it.hasNext()) {
        Map.Entry e=(Map.Entry)it.next();
        Address sender=(Address)e.getKey();
        org.jgroups.protocols.pbcast.Digest.Entry val=
                (org.jgroups.protocols.pbcast.Digest.Entry)e.getValue();
        if(sender == null || val == null) {
            if(warn) {
                log.warn("sender or value is null");
            }
            continue;
        }
        // seed each new window with the sender's high seqno from the digest
        NakReceiverWindow win=createNakReceiverWindow(sender, val.high_seqno);
        synchronized(received_msgs) {
            received_msgs.put(sender, win);
        }
    }
}
/**
* For all members of the digest, adjust the NakReceiverWindows in the received_msgs hashtable. If the member
* already exists, sets its seqno to be the max of the seqno and the seqno of the member in the digest. If no entry
* exists, create one with the initial seqno set to the seqno of the member in the digest.
*/
private void mergeDigest(Digest d) {
    if(d == null || d.senders == null) {
        if(log.isErrorEnabled()) {
            log.error("digest or digest.senders is null");
        }
        return;
    }
    for(Iterator it=d.senders.entrySet().iterator(); it.hasNext();) {
        Map.Entry e=(Map.Entry)it.next();
        Address sender=(Address)e.getKey();
        org.jgroups.protocols.pbcast.Digest.Entry val=
                (org.jgroups.protocols.pbcast.Digest.Entry)e.getValue();
        if(sender == null || val == null) {
            if(warn) {
                log.warn("sender or value is null");
            }
            continue;
        }
        long seqno_from_digest=val.high_seqno;
        synchronized(received_msgs) {
            NakReceiverWindow win=(NakReceiverWindow)received_msgs.get(sender);
            if(win == null) {
                // unknown sender: create a window starting at the digest's seqno
                received_msgs.put(sender, createNakReceiverWindow(sender, seqno_from_digest));
            }
            else if(win.getHighestReceived() < seqno_from_digest) {
                // digest is ahead of us: replace the window with one seeded at the digest's seqno
                win.reset();
                received_msgs.remove(sender);
                received_msgs.put(sender, createNakReceiverWindow(sender, seqno_from_digest));
            }
        }
    }
}
/**
 * Builds a NakReceiverWindow for {@code sender} starting at {@code initial_seqno},
 * configured from this protocol's retransmission settings.
 */
private NakReceiverWindow createNakReceiverWindow(Address sender, long initial_seqno) {
    NakReceiverWindow window=new NakReceiverWindow(sender, this, initial_seqno, timer);
    window.setRetransmitTimeouts(retransmit_timeout);
    window.setDiscardDeliveredMessages(discard_delivered_msgs);
    window.setMaxXmitBufSize(this.max_xmit_buf_size);
    if(stats)
        window.setListener(this); // only track window events when stats are enabled
    return window;
}
/**
* Returns the lowest seqno still in cache (so it can be retransmitted) and the highest seqno received so far.
*
* @param sender The address for which the highest and lowest seqnos are to be retrieved
* @param stop_at_gaps If true, the highest seqno *deliverable* will be returned. If false, the highest seqno
* *received* will be returned. E.g. for [+3 +4 +5 -6 +7 +8], the highest_seqno_received is 8,
* whereas the higheset_seqno_seen (deliverable) is 5.
*/
private Range getLowestAndHighestSeqno(Address sender, boolean stop_at_gaps) {
    if(sender == null) {
        if(log.isErrorEnabled()) {
            log.error("sender is null");
        }
        return null;
    }
    NakReceiverWindow win;
    synchronized(received_msgs) {
        win=(NakReceiverWindow)received_msgs.get(sender);
    }
    if(win == null) {
        if(log.isErrorEnabled()) {
            log.error("sender " + sender + " not found in received_msgs");
        }
        return null;
    }
    if(stop_at_gaps) {
        // highest *deliverable* seqno: stops at the first gap
        return new Range(win.getLowestSeen(), win.getHighestSeen());
    }
    // highest seqno *received*, gaps ignored
    return new Range(win.getLowestSeen(), win.getHighestReceived() + 1);
}
/**
* Returns the highest seqno seen from sender. E.g. if we received 1, 2, 4, 5 from P, then 5 will be returned
* (doesn't take gaps into account). If we are the sender, we will return the highest seqno <em>sent</em> rather
* then <em>received</em>
*/
private long getHighSeqnoSeen(Address sender) {
    if(sender == null) {
        if(log.isErrorEnabled()) {
            log.error("sender is null");
        }
        return 0;
    }
    // for ourselves, derive the answer from our own send counter
    if(sender.equals(local_addr)) {
        return seqno - 1;
    }
    NakReceiverWindow win;
    synchronized(received_msgs) {
        win=(NakReceiverWindow)received_msgs.get(sender);
    }
    if(win == null) {
        if(log.isErrorEnabled()) {
            log.error("sender " + sender + " not found in received_msgs");
        }
        return 0;
    }
    return win.getHighestReceived();
}
/**
* Garbage collect messages that have been seen by all members. Update sent_msgs: for the sender P in the digest
* which is equal to the local address, garbage collect all messages <= seqno at digest[P]. Update received_msgs:
* for each sender P in the digest and its highest seqno seen SEQ, garbage collect all delivered_msgs in the
* NakReceiverWindow corresponding to P which are <= seqno at digest[P].
*/
private void stable(Digest d) {
NakReceiverWindow recv_win;
long my_highest_rcvd; // highest seqno received in my digest for a sender P
long stability_highest_rcvd; // highest seqno received in the stability vector for a sender P
if(members == null || local_addr == null || d == null) {
if(warn)
log.warn("members, local_addr or digest are null !");
return;
}
if(trace) {
log.trace("received stable digest " + d);
}
Map.Entry entry;
Address sender;
org.jgroups.protocols.pbcast.Digest.Entry val;
long high_seqno_delivered, high_seqno_received;
for(Iterator it=d.senders.entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
sender=(Address)entry.getKey();
if(sender == null)
continue;
val=(org.jgroups.protocols.pbcast.Digest.Entry)entry.getValue();
high_seqno_delivered=val.high_seqno;
high_seqno_received=val.high_seqno_seen;
// check whether the last seqno received for a sender P in the stability vector is > last seqno
// received for P in my digest. if yes, request retransmission (see "Last Message Dropped" topic
// in DESIGN)
synchronized(received_msgs) {
recv_win=(NakReceiverWindow)received_msgs.get(sender);
}
if(recv_win != null) {
my_highest_rcvd=recv_win.getHighestReceived();
stability_highest_rcvd=high_seqno_received;
if(stability_highest_rcvd >= 0 && stability_highest_rcvd > my_highest_rcvd) {
if(trace) {
log.trace("my_highest_rcvd (" + my_highest_rcvd + ") < stability_highest_rcvd (" +
stability_highest_rcvd + "): requesting retransmission of " +
sender + '#' + stability_highest_rcvd);
}
retransmit(stability_highest_rcvd, stability_highest_rcvd, sender);
}
}
// keep the last gc_lag messages around as a safety margin before garbage collecting
high_seqno_delivered-=gc_lag;
if(high_seqno_delivered < 0) {
continue;
}
if(trace)
log.trace("deleting msgs <= " + high_seqno_delivered + " from " + sender);
// garbage collect from sent_msgs if sender was myself
if(sender.equals(local_addr)) {
synchronized(sent_msgs) {
// gets us a subset from [lowest seqno - seqno]
// NOTE(review): SortedMap.headMap() is exclusive of its argument, so the entry for
// high_seqno_delivered itself is retained even though the log above says "<=" -- confirm intent
SortedMap stable_keys=sent_msgs.headMap(new Long(high_seqno_delivered));
if(stable_keys != null) {
stable_keys.clear(); // this will modify sent_msgs directly
}
}
}
// delete *delivered* msgs that are stable
// recv_win=(NakReceiverWindow)received_msgs.get(sender);
if(recv_win != null)
recv_win.stable(high_seqno_delivered); // delete all messages with seqnos <= seqno
}
}
/**
* Implementation of Retransmitter.RetransmitCommand. Called by retransmission thread when gap is detected.
*/
public void retransmit(long first_seqno, long last_seqno, Address sender) {
    Address dest=sender; // by default, ask the original sender for the retransmission
    if(xmit_from_random_member && !local_addr.equals(sender)) {
        // optionally spread retransmission load by asking a random member instead
        Address candidate=(Address)Util.pickRandomElement(members);
        if(candidate != null && !local_addr.equals(candidate)) {
            dest=candidate;
            if(trace)
                log.trace("picked random member " + dest + " to send XMIT request to");
        }
    }
    NakAckHeader hdr=new NakAckHeader(NakAckHeader.XMIT_REQ, first_seqno, last_seqno, sender);
    Message xmit_req=new Message(dest, null, null);
    if(trace)
        log.trace(local_addr + ": sending XMIT_REQ ([" + first_seqno + ", " + last_seqno + "]) to " + dest);
    xmit_req.putHeader(name, hdr);
    passDown(new Event(Event.MSG, xmit_req));
    if(stats) {
        xmit_reqs_sent+=last_seqno - first_seqno +1;
        updateStats(sent, dest, 1, 0, 0);
        for(long i=first_seqno; i <= last_seqno; i++) {
            send_history.add(new XmitRequest(sender, i, dest));
        }
    }
}
/** Window-listener callback: records that a previously missing message finally arrived. */
public void missingMessageReceived(long seqno, Message msg) {
    if(!stats)
        return;
    missing_msgs_received++;
    updateStats(received, msg.getSrc(), 0, 0, 1);
    receive_history.add(new MissingMessage(msg.getSrc(), seqno));
}
private void clear() {
    // NOTE: sent_msgs is deliberately NOT cleared here (April 21 2004, bela; SourceForge bug# 938584):
    // our own messages sent between a join() and a getState() must stay retransmittable for members
    // who missed them. STABLE (message garbage collection) will clear them eventually.
    synchronized(received_msgs) {
        Iterator it=received_msgs.values().iterator();
        while(it.hasNext()) {
            ((NakReceiverWindow)it.next()).reset();
        }
        received_msgs.clear();
    }
}
private void reset() {
    // full reset: unlike clear(), this also discards our own sent messages and seqno counter
    synchronized(sent_msgs) {
        sent_msgs.clear();
        seqno=-1;
    }
    synchronized(received_msgs) {
        Iterator it=received_msgs.values().iterator();
        while(it.hasNext()) {
            ((NakReceiverWindow)it.next()).destroy();
        }
        received_msgs.clear();
    }
}
/** Returns a human-readable dump of sent_msgs and all per-sender receiver windows. */
public String printMessages() {
    StringBuffer sb=new StringBuffer();
    sb.append("\nsent_msgs: ").append(printSentMsgs());
    sb.append("\nreceived_msgs:\n");
    synchronized(received_msgs) {
        for(Iterator it=received_msgs.entrySet().iterator(); it.hasNext();) {
            Map.Entry e=(Map.Entry)it.next();
            sb.append(e.getKey()).append(": ").append(e.getValue().toString()).append('\n');
        }
    }
    return sb.toString();
}
/**
 * Returns a compact summary of sent_msgs in the form "[min - max] (count)".
 *
 * Fix: the original read sent_msgs.size() outside the synchronized block, so the printed
 * count could be inconsistent with the min/max seqnos captured under the lock (and was an
 * unsynchronized read of shared state). The size is now captured inside the same
 * synchronized block as firstKey()/lastKey().
 */
public String printSentMsgs() {
    Long min_seqno, max_seqno;
    int num_msgs;
    synchronized(sent_msgs) {
        num_msgs=sent_msgs.size();
        // firstKey()/lastKey() throw on an empty map, hence the size guard
        min_seqno=num_msgs > 0 ? (Long)sent_msgs.firstKey() : new Long(0);
        max_seqno=num_msgs > 0 ? (Long)sent_msgs.lastKey() : new Long(0);
    }
    StringBuffer sb=new StringBuffer();
    sb.append('[').append(min_seqno).append(" - ").append(max_seqno).append("] (").append(num_msgs).append(")");
    return sb.toString();
}
/** Applies a CONFIG event: adopts "frag_size" (if present) as the new max_xmit_size. */
private void handleConfigEvent(HashMap map) {
    if(map == null) {
        return;
    }
    if(!map.containsKey("frag_size")) {
        return;
    }
    max_xmit_size=((Integer)map.get("frag_size")).intValue();
    if(log.isInfoEnabled()) {
        log.info("max_xmit_size=" + max_xmit_size);
    }
}
/** Per-member retransmission statistics counters. */
static class Entry {
    long xmit_reqs, xmit_rsps, missing_msgs_rcvd;

    /** Renders the counters, e.g. "3 xmit_reqs, 1 xmit_rsps, 0 missing msgs". */
    public String toString() {
        return xmit_reqs + " xmit_reqs" + ", " + xmit_rsps + " xmit_rsps" +
                ", " + missing_msgs_rcvd + " missing msgs";
    }
}
/** History record of one XMIT request we sent (one per requested seqno). */
static class XmitRequest {
    Address original_sender; // original sender of message
    long seq, timestamp=System.currentTimeMillis();
    Address xmit_dest; // destination to which XMIT_REQ is sent, usually the original sender

    XmitRequest(Address original_sender, long seqno, Address xmit_dest) {
        this.original_sender=original_sender;
        this.xmit_dest=xmit_dest;
        this.seq=seqno;
    }

    public String toString() {
        return new Date(timestamp) + ": " + original_sender + " #" + seq +
                " (XMIT_REQ sent to " + xmit_dest + ")";
    }
}
/** History record of one previously missing message that was eventually received. */
static class MissingMessage {
    Address original_sender;
    long seq, timestamp=System.currentTimeMillis();

    MissingMessage(Address original_sender, long seqno) {
        this.original_sender=original_sender;
        this.seq=seqno;
    }

    public String toString() {
        return new Date(timestamp) + ": " + original_sender + " #" + seq;
    }
}
}
|
/*
* $Id: CrawlManagerImpl.java,v 1.144 2012-07-11 18:53:44 tlipkis Exp $
*/
package org.lockss.crawler;
import java.util.*;
import org.apache.commons.lang.builder.CompareToBuilder;
import org.apache.commons.collections.*;
import org.apache.commons.collections.map.*;
import org.apache.commons.collections.bag.HashBag; // needed to disambiguate
import org.apache.commons.collections.set.ListOrderedSet;
import org.apache.oro.text.regex.*;
import EDU.oswego.cs.dl.util.concurrent.*;
import org.lockss.config.*;
import org.lockss.daemon.*;
import org.lockss.daemon.status.*;
import org.lockss.state.NodeState;
import org.lockss.util.*;
import org.lockss.app.*;
import org.lockss.alert.*;
import org.lockss.state.*;
import org.lockss.plugin.*;
import org.lockss.plugin.exploded.*;
import org.lockss.plugin.PluginManager.AuEvent;
/**
* This object sits between the crawler and the rest of the daemon.
* It mediates the different crawl types.
*/
// ToDo:
// 1)handle background crawls
// 2)check for conflicting crawl types
// 3)check crawl schedule rules
public class CrawlManagerImpl extends BaseLockssDaemonManager
implements CrawlManager, CrawlManager.StatusSource, ConfigurableManager {
static Logger logger = Logger.getLogger("CrawlManager");
public static final String PREFIX = Configuration.PREFIX + "crawler.";
/**
* The expiration deadline for a new content crawl, in ms.
*/
public static final String PARAM_NEW_CONTENT_CRAWL_EXPIRATION =
PREFIX + "new_content.expiration";
private static final long DEFAULT_NEW_CONTENT_CRAWL_EXPIRATION =
10 * Constants.DAY;
/**
* The expiration deadline for a repair crawl, in ms.
*/
public static final String PARAM_REPAIR_CRAWL_EXPIRATION =
PREFIX + "repair.expiration";
private static final long DEFAULT_REPAIR_CRAWL_EXPIRATION =
5 * Constants.DAY;
public static final String PARAM_REPAIR_FROM_CACHE_PERCENT =
PREFIX + "repair.repair_from_cache_percent";
public static final float DEFAULT_REPAIR_FROM_CACHE_PERCENT = 0;
/** Set false to prevent all crawl activity */
public static final String PARAM_CRAWLER_ENABLED =
PREFIX + "enabled";
static final boolean DEFAULT_CRAWLER_ENABLED = true;
/** Use thread pool and queue if true, start threads directly if false.
* Only takes effect at startup. */
public static final String PARAM_CRAWLER_QUEUE_ENABLED =
PREFIX + "queue.enabled";
static final boolean DEFAULT_CRAWLER_QUEUE_ENABLED = true;
/** Max threads in crawler thread pool. Does not include repair crawls,
* which are limited only by the number of running polls. */
public static final String PARAM_CRAWLER_THREAD_POOL_MAX =
PREFIX + "threadPool.max";
static final int DEFAULT_CRAWLER_THREAD_POOL_MAX = 15;
/** Thread pool on-demand choice mode. If true, crawl starter thread
blocks in execute until a thread is ready, then chooses the best next
crawl. Only takes effect at startup. */
public static final String PARAM_USE_ODC = PREFIX + "threadPool.onDemand";
static final boolean DEFAULT_USE_ODC = true;
/** Max size of crawl queue, cannot be changed except at startup */
public static final String PARAM_CRAWLER_THREAD_POOL_MAX_QUEUE_SIZE =
PREFIX + "threadPool.maxQueueSize";
static final int DEFAULT_CRAWLER_THREAD_POOL_MAX_QUEUE_SIZE = 200;
/** Max number of queued crawls; can be changed on the fly up to the max
* set by {@link #PARAM_CRAWLER_THREAD_POOL_MAX_QUEUE_SIZE} */
public static final String PARAM_CRAWLER_THREAD_POOL_QUEUE_SIZE =
PREFIX + "threadPool.queueSize";
static final int DEFAULT_CRAWLER_THREAD_POOL_QUEUE_SIZE = 100;
/** Duration after which idle threads will be terminated. -1 = never */
public static final String PARAM_CRAWLER_THREAD_POOL_KEEPALIVE =
PREFIX + "threadPool.keepAlive";
static final long DEFAULT_CRAWLER_THREAD_POOL_KEEPALIVE =
2 * Constants.MINUTE;
/** Interval at which we check AUs to see if they need a new content
* crawl. */
public static final String PARAM_START_CRAWLS_INTERVAL =
PREFIX + "startCrawlsInterval";
static final long DEFAULT_START_CRAWLS_INTERVAL = 1 * Constants.HOUR;
/** Initial delay after AUs started before crawl starter first runs. */
public static final String PARAM_START_CRAWLS_INITIAL_DELAY =
PREFIX + "startCrawlsInitialDelay";
static final long DEFAULT_START_CRAWLS_INITIAL_DELAY = 2 * Constants.MINUTE;
// ODC params
static String ODC_PREFIX = PREFIX + "odc.";
/** Max interval between recalculating crawl queue order */
public static final String PARAM_REBUILD_CRAWL_QUEUE_INTERVAL =
ODC_PREFIX + "queueRecalcInterval";
static final long DEFAULT_REBUILD_CRAWL_QUEUE_INTERVAL = Constants.HOUR;
/** Interval after new AU creation to recalc queue. Should be large
* enough that it only happens once when a batch of AUs is finished. */
public static final String PARAM_QUEUE_RECALC_AFTER_NEW_AU =
ODC_PREFIX + "queueRecalcAfterNewAu";
static final long DEFAULT_QUEUE_RECALC_AFTER_NEW_AU = 1 * Constants.MINUTE;
/** Interval to sleep when queue empty, before recalc. */
public static final String PARAM_QUEUE_EMPTY_SLEEP =
ODC_PREFIX + "queueEmptySleep";
static final long DEFAULT_QUEUE_EMPTY_SLEEP = 15 * Constants.MINUTE;
/** Size of queue of unshared rate AUs. */
public static final String PARAM_UNSHARED_QUEUE_MAX =
ODC_PREFIX + "unsharedQueueMax";
static final int DEFAULT_UNSHARED_QUEUE_MAX = 5;
/** Size of queue of shared rate AUs. */
public static final String PARAM_SHARED_QUEUE_MAX =
ODC_PREFIX + "sharedQueueMax";
static final int DEFAULT_SHARED_QUEUE_MAX = 5;
/** Min number of threads available to AUs with unshared rate limiters */
public static final String PARAM_FAVOR_UNSHARED_RATE_THREADS =
ODC_PREFIX + "favorUnsharedRateThreads";
static final int DEFAULT_FAVOR_UNSHARED_RATE_THREADS = 1;
enum CrawlOrder {CrawlDate, CreationDate};
/** Determines how the crawl queues are sorted. <code>CrawlDate</code>:
* By recency of previous crawl attempt, etc. (Attempts to give all AUs
* an equal chance to crawl as often as they want.);
* <code>CreationDate</code>: by order in which AUs were
* created. (Attempts to synchronize crawls of AU across machines to
* optimize for earliest polling.) */
public static final String PARAM_CRAWL_ORDER = PREFIX + "crawlOrder";
public static final CrawlOrder DEFAULT_CRAWL_ORDER = CrawlOrder.CrawlDate;
/** Maximum rate at which we will start repair crawls for any particular
* AU */
public static final String PARAM_MAX_REPAIR_RATE =
PREFIX + "maxRepairRate";
public static final String DEFAULT_MAX_REPAIR_RATE = "50/1d";
/** Maximum rate at which we will start new content crawls for any
* particular AU */
public static final String PARAM_MAX_NEW_CONTENT_RATE =
PREFIX + "maxNewContentRate";
public static final String DEFAULT_MAX_NEW_CONTENT_RATE = "1/18h";
/** Maximum rate at which we will start new content crawls for any
* particular plugin registry */
public static final String PARAM_MAX_PLUGIN_REGISTRY_NEW_CONTENT_RATE =
PREFIX + "maxPluginRegistryNewContentRate";
public static final String DEFAULT_MAX_PLUGIN_REGISTRY_NEW_CONTENT_RATE =
"1/2h";
/** Maximum rate at which we will start any new content crawl, to keep
* multiple crawls from starting at exactly the same time and all
* fetching in sync. Should be one event per less than a second,
* relatively prime to fetch delay. */
public static final String PARAM_NEW_CONTENT_START_RATE =
PREFIX + "newContentStartRate";
public static final String DEFAULT_NEW_CONTENT_START_RATE = "1/730";
/** Don't start crawl if window will close before this interval */
public static final String PARAM_MIN_WINDOW_OPEN_FOR =
PREFIX + "minWindowOpenFor";
public static final long DEFAULT_MIN_WINDOW_OPEN_FOR = 15 * Constants.MINUTE;
/** If true, give priority to crawls that were running when daemon died */
public static final String PARAM_RESTART_AFTER_CRASH =
PREFIX + "restartAfterCrash";
public static final boolean DEFAULT_RESTART_AFTER_CRASH = true;
/** Number of most recent crawls for which status will be available.
* This must be larger than the thread pool + queue size or status table
* will be incomplete. */
static final String PARAM_HISTORY_MAX =
PREFIX + "historySize";
static final int DEFAULT_HISTORY_MAX = 500;
/** Map of regexp to priority. If set, AUIDs are assigned the
* corresponding crawl priority of the first regexp they match. Priority
* must be an integer greater than -10000. A priority <= -10000
* disables matching AUs from crawling at all. */
static final String PARAM_CRAWL_PRIORITY_AUID_MAP =
PREFIX + "crawlPriorityAuidMap";
static final List DEFAULT_CRAWL_PRIORITY_AUID_MAP = null;
/** Map of concurrent pool name to pool size. By default all pools are
* of size 1; to enable 2 concurrent crawls in pool FOO, add FOO,2 to
* this list. */
static final String PARAM_CONCURRENT_CRAWL_LIMIT_MAP = PREFIX +
"concurrentCrawlLimitMap";
static final List DEFAULT_CONCURRENT_CRAWL_LIMIT_MAP = null;
static final int MIN_CRAWL_PRIORITY = -10000;
/** Regexp matching URLs we never want to collect. Intended to stop
* runaway crawls by catching recursive URLS */
static final String PARAM_EXCLUDE_URL_PATTERN =
PREFIX + "globallyExcludedUrlPattern";
static final String DEFAULT_EXCLUDE_URL_PATTERN = null;
static final String WDOG_PARAM_CRAWLER = "Crawler";
static final long WDOG_DEFAULT_CRAWLER = 2 * Constants.HOUR;
static final String PRIORITY_PARAM_CRAWLER = "Crawler";
static final int PRIORITY_DEFAULT_CRAWLER = Thread.NORM_PRIORITY - 1;
public static final String CRAWL_STATUS_TABLE_NAME = "crawl_status_table";
public static final String CRAWL_URLS_STATUS_TABLE =
"crawl_urls";
public static final String SINGLE_CRAWL_STATUS_TABLE =
"single_crawl_status_table";
// Sibling daemon managers, resolved in startService()
private PluginManager pluginMgr;
private AlertManager alertMgr;
//Tracking crawls for the status info
private CrawlManagerStatus cmStatus;
// Lock for structures updated when a crawl starts or ends
Object runningCrawlersLock = new Object();
// Maps pool key to record of all crawls active in that pool
// Synchronized on runningCrawlersLock
private Map<String,PoolCrawlers> poolMap = new HashMap<String,PoolCrawlers>();
// AUs running new content crawls
// Synchronized on runningCrawlersLock
private Set<ArchivalUnit> runningNCCrawls = new HashSet<ArchivalUnit>();
// Cached values of the config parameters declared above; refreshed in setConfig()
private long contentCrawlExpiration;
private long repairCrawlExpiration;
private float percentRepairFromCache;
private boolean crawlerEnabled = DEFAULT_CRAWLER_ENABLED;
private boolean paramQueueEnabled = DEFAULT_CRAWLER_QUEUE_ENABLED;
private int paramMaxPoolSize = DEFAULT_CRAWLER_THREAD_POOL_MAX;
private boolean paramOdc = DEFAULT_USE_ODC;
private int paramPoolQueueSize = DEFAULT_CRAWLER_THREAD_POOL_QUEUE_SIZE;
private int paramPoolMaxQueueSize =
DEFAULT_CRAWLER_THREAD_POOL_MAX_QUEUE_SIZE;
private long paramPoolKeepaliveTime = DEFAULT_CRAWLER_THREAD_POOL_KEEPALIVE;
private long paramStartCrawlsInterval = DEFAULT_START_CRAWLS_INTERVAL;
private long paramStartCrawlsInitialDelay =
DEFAULT_START_CRAWLS_INITIAL_DELAY;
private long paramMinWindowOpenFor = DEFAULT_MIN_WINDOW_OPEN_FOR;
private boolean paramRestartAfterCrash = DEFAULT_RESTART_AFTER_CRASH;
/** Note that this is an Apache ORO Pattern, not a Java Pattern */
private Pattern globallyExcludedUrlPattern;
// Derived from PARAM_CRAWL_PRIORITY_AUID_MAP: AUID regexp -> crawl priority
private Map<Pattern,Integer> crawlPriorityAuidMap;
// Derived from PARAM_CONCURRENT_CRAWL_LIMIT_MAP: pool name -> max concurrent crawls
private Map<String,Integer> concurrentCrawlLimitMap;
private int histSize = DEFAULT_HISTORY_MAX;
// Per-AU rate limiters for repair, new-content, and plugin-registry crawls
private RateLimiter.LimiterMap repairRateLimiters =
new RateLimiter.LimiterMap(PARAM_MAX_REPAIR_RATE,
DEFAULT_MAX_REPAIR_RATE);
private RateLimiter.LimiterMap newContentRateLimiters =
new RateLimiter.LimiterMap(PARAM_MAX_NEW_CONTENT_RATE,
DEFAULT_MAX_NEW_CONTENT_RATE);
private RateLimiter.LimiterMap pluginRegistryNewContentRateLimiters =
new RateLimiter.LimiterMap(PARAM_MAX_PLUGIN_REGISTRY_NEW_CONTENT_RATE,
DEFAULT_MAX_PLUGIN_REGISTRY_NEW_CONTENT_RATE);
private RateLimiter newContentStartRateLimiter;
private AuEventHandler auCreateDestroyHandler;
// Crawl worker thread pool and its (optional) bounded priority queue
PooledExecutor pool;
BoundedPriorityQueue poolQueue;
/**
* start the crawl manager.
* @see org.lockss.app.LockssManager#startService()
*/
public void startService() {
super.startService();
LockssDaemon daemon = getDaemon();
pluginMgr = daemon.getPluginManager();
alertMgr = daemon.getAlertManager();
// paramOdc is read once here; the ODC/pool mode cannot change after startup
paramOdc = CurrentConfig.getBooleanParam(PARAM_USE_ODC, DEFAULT_USE_ODC);
cmStatus = new CrawlManagerStatus(histSize);
cmStatus.setOdc(paramOdc);
// register the crawl status tables with the daemon's status service
StatusService statusServ = daemon.getStatusService();
statusServ.registerStatusAccessor(CRAWL_STATUS_TABLE_NAME,
new CrawlManagerStatusAccessor(this));
statusServ.registerOverviewAccessor(CRAWL_STATUS_TABLE_NAME,
new CrawlManagerStatusAccessor.CrawlOverview(this));
statusServ.registerStatusAccessor(CRAWL_URLS_STATUS_TABLE,
new CrawlUrlsStatusAccessor(this));
statusServ.registerStatusAccessor(SINGLE_CRAWL_STATUS_TABLE,
new SingleCrawlStatusAccessor(this));
// register our AU event handler
auCreateDestroyHandler = new AuEventHandler.Base() {
@Override public void auDeleted(AuEvent event, ArchivalUnit au) {
auEventDeleted(au);
}
@Override public void auCreated(AuEvent event, ArchivalUnit au) {
rebuildQueueSoon();
}
};
pluginMgr.registerAuEventHandler(auCreateDestroyHandler);
// non-ODC queued mode uses a bounded priority queue in front of the pool;
// ODC mode (and unqueued mode) hands tasks straight to the pool
if (!paramOdc && paramQueueEnabled) {
poolQueue = new BoundedPriorityQueue(paramPoolQueueSize,
new CrawlQueueComparator());
pool = new PooledExecutor(poolQueue, paramMaxPoolSize);
} else {
poolQueue = null;
pool = new PooledExecutor(paramMaxPoolSize);
}
// Thread pool favors queueing once min threads exist, so must set min
// threads equal to max threads
pool.setMinimumPoolSize(paramMaxPoolSize);
pool.setKeepAliveTime(paramPoolKeepaliveTime);
// ODC: crawl starter blocks in execute() until a thread frees up;
// otherwise execute() aborts when the pool is saturated
if (paramOdc) {
pool.waitWhenBlocked();
} else {
pool.abortWhenBlocked();
}
logger.debug2("Crawler thread pool min, max, queuelen: " +
pool.getMinimumPoolSize() + ", " +
pool.getMaximumPoolSize() + ", " +
(poolQueue != null ? poolQueue.capacity() : 0));
if (paramOdc || paramStartCrawlsInterval > 0) {
enableCrawlStarter();
}
}
/**
* stop the crawl manager
* @see org.lockss.app.LockssManager#stopService()
*/
public void stopService() {
    shuttingDown = true;
    // stop accepting new work before tearing anything down
    disableCrawlStarter();
    if (pool != null) {
        pool.shutdownNow();
    }
    if (auCreateDestroyHandler != null) {
        pluginMgr.unregisterAuEventHandler(auCreateDestroyHandler);
        auCreateDestroyHandler = null;
    }
    // checkpoint here
    StatusService ss = getDaemon().getStatusService();
    if (ss != null) {
        // remove every status table registered in startService()
        ss.unregisterStatusAccessor(CRAWL_STATUS_TABLE_NAME);
        ss.unregisterOverviewAccessor(CRAWL_STATUS_TABLE_NAME);
        ss.unregisterStatusAccessor(CRAWL_URLS_STATUS_TABLE);
        ss.unregisterStatusAccessor(SINGLE_CRAWL_STATUS_TABLE);
    }
    super.stopService();
}
  /**
   * Apply (re)configuration.  Parameters under {@code PREFIX} are only
   * re-read when something under that prefix changed; a few parameters
   * additionally trigger immediate side effects (pool resizing, queue
   * rebuilds, starting/stopping the crawl starter).
   * @param config the new configuration
   * @param oldConfig the previous configuration
   * @param changedKeys which keys differ between the two
   */
  public void setConfig(Configuration config, Configuration oldConfig,
                        Configuration.Differences changedKeys) {
    if (changedKeys.contains(PREFIX)) {
      contentCrawlExpiration =
        config.getTimeInterval(PARAM_NEW_CONTENT_CRAWL_EXPIRATION,
                               DEFAULT_NEW_CONTENT_CRAWL_EXPIRATION);
      repairCrawlExpiration =
        config.getTimeInterval(PARAM_REPAIR_CRAWL_EXPIRATION,
                               DEFAULT_REPAIR_CRAWL_EXPIRATION);
      percentRepairFromCache =
        config.getPercentage(PARAM_REPAIR_FROM_CACHE_PERCENT,
                             DEFAULT_REPAIR_FROM_CACHE_PERCENT);
      crawlerEnabled =
        config.getBoolean(PARAM_CRAWLER_ENABLED,
                          DEFAULT_CRAWLER_ENABLED);
      paramQueueEnabled =
        config.getBoolean(PARAM_CRAWLER_QUEUE_ENABLED,
                          DEFAULT_CRAWLER_QUEUE_ENABLED);
      paramMaxPoolSize = config.getInt(PARAM_CRAWLER_THREAD_POOL_MAX,
                                       DEFAULT_CRAWLER_THREAD_POOL_MAX);
      paramPoolKeepaliveTime =
        config.getTimeInterval(PARAM_CRAWLER_THREAD_POOL_KEEPALIVE,
                               DEFAULT_CRAWLER_THREAD_POOL_KEEPALIVE);
      // Propagate thread-pool sizing changes to the live pool, if created.
      // Min is set equal to max, so the pool does not shrink below max.
      if (pool != null) {
        pool.setMaximumPoolSize(paramMaxPoolSize);
        pool.setMinimumPoolSize(paramMaxPoolSize);
        pool.setKeepAliveTime(paramPoolKeepaliveTime);
      }
      paramPoolQueueSize =
        config.getInt(PARAM_CRAWLER_THREAD_POOL_QUEUE_SIZE,
                      DEFAULT_CRAWLER_THREAD_POOL_QUEUE_SIZE);
      paramPoolMaxQueueSize =
        config.getInt(PARAM_CRAWLER_THREAD_POOL_MAX_QUEUE_SIZE,
                      DEFAULT_CRAWLER_THREAD_POOL_MAX_QUEUE_SIZE);
      // NOTE(review): live queue resizing is intentionally disabled; a
      // changed queue size only takes effect on restart.
      if (poolQueue != null && paramPoolQueueSize != poolQueue.capacity()) {
        //  	poolQueue.setCapacity(paramPoolQueueSize);
      }
      paramQueueEmptySleep = config.getTimeInterval(PARAM_QUEUE_EMPTY_SLEEP,
                                                    DEFAULT_QUEUE_EMPTY_SLEEP);
      paramQueueRecalcAfterNewAu =
        config.getTimeInterval(PARAM_QUEUE_RECALC_AFTER_NEW_AU,
                               DEFAULT_QUEUE_RECALC_AFTER_NEW_AU);
      paramUnsharedQueueMax = config.getInt(PARAM_UNSHARED_QUEUE_MAX,
                                            DEFAULT_UNSHARED_QUEUE_MAX);
      paramSharedQueueMax = config.getInt(PARAM_SHARED_QUEUE_MAX,
                                          DEFAULT_SHARED_QUEUE_MAX);
      // Resize the already-built request queues in place.
      unsharedRateReqs.setMaxSize(paramUnsharedQueueMax);
      sharedRateReqs.setTreeSetSize(paramSharedQueueMax);
      paramFavorUnsharedRateThreads =
        config.getInt(PARAM_FAVOR_UNSHARED_RATE_THREADS,
                      DEFAULT_FAVOR_UNSHARED_RATE_THREADS);
      paramCrawlOrder = (CrawlOrder)config.getEnum(CrawlOrder.class,
                                                   PARAM_CRAWL_ORDER,
                                                   DEFAULT_CRAWL_ORDER);
      paramRebuildCrawlQueueInterval =
        config.getTimeInterval(PARAM_REBUILD_CRAWL_QUEUE_INTERVAL,
                               DEFAULT_REBUILD_CRAWL_QUEUE_INTERVAL);
      paramMinWindowOpenFor =
        config.getTimeInterval(PARAM_MIN_WINDOW_OPEN_FOR,
                               DEFAULT_MIN_WINDOW_OPEN_FOR);
      paramRestartAfterCrash =
        config.getBoolean(PARAM_RESTART_AFTER_CRASH,
                          DEFAULT_RESTART_AFTER_CRASH);
      paramStartCrawlsInitialDelay =
        config.getTimeInterval(PARAM_START_CRAWLS_INITIAL_DELAY,
                               DEFAULT_START_CRAWLS_INITIAL_DELAY);
      if (changedKeys.contains(PARAM_CRAWL_PRIORITY_AUID_MAP)) {
        crawlPriorityAuidMap =
          makeCrawlPriorityAuidMap(config.getList(PARAM_CRAWL_PRIORITY_AUID_MAP,
                                                  DEFAULT_CRAWL_PRIORITY_AUID_MAP));
        // Priorities changed; recompute the crawl queue (only once AUs
        // exist to be queued).
        if (areAusStarted()) {
          rebuildQueueSoon();
        }
      }
      if (changedKeys.contains(PARAM_CONCURRENT_CRAWL_LIMIT_MAP)) {
        concurrentCrawlLimitMap =
          makeCrawlPoolSizeMap(config.getList(PARAM_CONCURRENT_CRAWL_LIMIT_MAP,
                                              DEFAULT_CONCURRENT_CRAWL_LIMIT_MAP));
        resetCrawlPoolSizes();
      }
      if (changedKeys.contains(PARAM_START_CRAWLS_INTERVAL)) {
        paramStartCrawlsInterval =
          config.getTimeInterval(PARAM_START_CRAWLS_INTERVAL,
                                 DEFAULT_START_CRAWLS_INTERVAL);
        // A positive interval enables the periodic crawl starter; zero or
        // negative disables it.
        if (paramStartCrawlsInterval > 0) {
          if (theApp.isAppRunning()) {
            enableCrawlStarter();
          }
        } else {
          disableCrawlStarter();
        }
      }
      if (changedKeys.contains(PARAM_EXCLUDE_URL_PATTERN)) {
        setExcludedUrlPattern(config.get(PARAM_EXCLUDE_URL_PATTERN,
                                         DEFAULT_EXCLUDE_URL_PATTERN),
                              DEFAULT_EXCLUDE_URL_PATTERN);
      }
      if (changedKeys.contains(PARAM_MAX_REPAIR_RATE)) {
        repairRateLimiters.resetRateLimiters(config);
      }
      if (changedKeys.contains(PARAM_MAX_NEW_CONTENT_RATE)) {
        newContentRateLimiters.resetRateLimiters(config);
      }
      if (changedKeys.contains(PARAM_MAX_PLUGIN_REGISTRY_NEW_CONTENT_RATE)) {
        pluginRegistryNewContentRateLimiters.resetRateLimiters(config);
      }
      if (changedKeys.contains(PARAM_NEW_CONTENT_START_RATE)) {
        newContentStartRateLimiter =
          RateLimiter.getConfiguredRateLimiter(config,
                                               newContentStartRateLimiter,
                                               PARAM_NEW_CONTENT_START_RATE,
                                               DEFAULT_NEW_CONTENT_START_RATE);
      }
      if (changedKeys.contains(PARAM_HISTORY_MAX) ) {
        histSize = config.getInt(PARAM_HISTORY_MAX, DEFAULT_HISTORY_MAX);
        // cmStatus may not exist yet when called before service start.
        if (cmStatus != null) {
          cmStatus.setHistSize(histSize);
        }
      }
    }
  }
public boolean isCrawlerEnabled() {
return crawlerEnabled;
}
public boolean isGloballyExcludedUrl(ArchivalUnit au, String url) {
if (globallyExcludedUrlPattern == null) {
return false;
}
return RegexpUtil.getMatcher().contains(url, globallyExcludedUrlPattern);
}
  /**
   * Compile and install the global URL exclude pattern.  If the supplied
   * pattern is malformed, fall back to compiling the default pattern; if
   * that also fails (or pat is null), clear the pattern entirely.
   * @param pat the configured pattern, possibly null
   * @param defaultPat the fallback pattern, possibly null
   */
  void setExcludedUrlPattern(String pat, String defaultPat) {
    if (pat != null) {
      // Case-insensitive, and read-only so the compiled pattern can be
      // shared safely.
      int flags =
        Perl5Compiler.READ_ONLY_MASK | Perl5Compiler.CASE_INSENSITIVE_MASK;
      try {
        globallyExcludedUrlPattern = RegexpUtil.getCompiler().compile(pat,
                                                                      flags);
        logger.info("Global exclude pattern: " + pat);
        return;
      } catch (MalformedPatternException e) {
        logger.error("Illegal global exclude pattern: " + pat, e);
        // Fall back to the default, but only if it's actually different
        // from the pattern that just failed.
        if (defaultPat != null && !defaultPat.equals(pat)) {
          try {
            globallyExcludedUrlPattern =
              RegexpUtil.getCompiler().compile(defaultPat, flags);
            logger.info("Using default global exclude pattern: " + defaultPat);
            return;
          } catch (MalformedPatternException e2) {
            logger.error("Illegal default global exclude pattern: "
                         + defaultPat,
                         e2);
          }
        }
      }
    }
    // Reached only when no usable pattern could be compiled.
    globallyExcludedUrlPattern = null;
    logger.debug("No global exclude pattern");
  }
  // A no-op Runnable; execute() submits it to the pool as a probe (see
  // execute() below).
  static final Runnable NULL_RUNNER = new Runnable() {
      public void run() {}
    };
  /**
   * Execute the runnable in a pool thread
   * @param run the Runnable to be run
   * @throws InterruptedException if no pool thread or queue space is available
   */
  protected void execute(Runnable run) throws InterruptedException {
    pool.execute(run);
    if (logger.isDebug3()) logger.debug3("Queued/started " + run);
    if (paramOdc) {
      // In on-demand mode, submit a no-op runnable and wait for it to be
      // accepted; this blocks until the pool has capacity again.
      if (logger.isDebug3()) logger.debug3("waiting");
      pool.execute(NULL_RUNNER);
      if (logger.isDebug3()) logger.debug3("waited");
    }
  }
  /** Info about all the crawls running in a crawl pool. Ensures that they
   * all use the same set of rate limiters. */
  class PoolCrawlers {
    // The pool's key (shared fetch rate limiter key, or auid).
    String poolKey;
    // Max number of distinct CrawlRateLimiters this pool may create.
    int max;
    // All rate limiters created for this pool.
    Set<CrawlRateLimiter> crls = new HashSet<CrawlRateLimiter>();
    // Which rate limiter each active crawler was assigned.
    Map<Crawler,CrawlRateLimiter> crlMap =
      new HashMap<Crawler,CrawlRateLimiter>();
    // True if this pool's AUs share a fetch rate limiter key.
    boolean isShared = false;

    PoolCrawlers(String key) {
      this.poolKey = key;
      setMax();
    }

    // Recompute max from the current concurrent crawl limit config.
    void setMax() {
      max = getCrawlPoolSize(poolKey);
    }

    void setShared() {
      isShared = true;
    }

    boolean isShared() {
      return isShared;
    }

    /** Add a crawler and assign it a CrawlRateLimiter from the available
     * pool */
    synchronized void addCrawler(Crawler crawler) {
      if (crlMap.containsKey(crawler)) {
        // Throwable logged to capture the caller's stack for debugging.
        logger.warning("Adding redundant crawler: " + crawler.getAu() + ", " +
                       crawler, new Throwable());
        return;
      }
      CrawlRateLimiter crl;
      if (crls.size() < max) {
        // Still under the limit: give this crawler its own limiter.
        crl = newCrawlRateLimiter(crawler.getAu());
        crls.add(crl);
      } else {
        // At the limit: share one of the existing limiters.
        crl = chooseCrawlRateLimiter(crawler);
      }
      crl.addCrawler(crawler);
      crlMap.put(crawler, crl);
    }

    synchronized boolean isEmpty() {
      return crlMap.isEmpty();
    }

    /** Remove a crawler, inform the CrawlRateLimiter */
    synchronized void removeCrawler(Crawler crawler) {
      CrawlRateLimiter crl = crlMap.get(crawler);
      if (crl != null) {
        crl.removeCrawler(crawler);
      } else {
        logger.error("Stopping crawler with no crl: " + crawler);
      }
      crlMap.remove(crawler);
    }

    /** New content crawls prefer a crl that's not in use by any other new
     * content crawls.  (One should always exist, unless pool size changed
     * between nextReqFromBuiltQueue() and now.)  Repair crawls get crl
     * with minimum use count.
     */
    CrawlRateLimiter chooseCrawlRateLimiter(Crawler crawler) {
      CrawlRateLimiter res = null;
      for (CrawlRateLimiter crl : crls) {
        // Whole-AU (new content) crawls skip limiters already serving a
        // new content crawl.
        if (crawler.isWholeAU() && crl.getNewContentCount() != 0) {
          continue;
        }
        if (res == null || crl.getCrawlerCount() < res.getCrawlerCount()) {
          res = crl;
        }
      }
      if (res == null) {
        // This can happen if the pool size is changing (due to a config
        // update) such that nextReqFromBuiltQueue() sees a different size
        // than has yet been communicated to the crawl pools by our config
        // callback.  The temporary consequences of causing this crawl
        // start to fail seem better than allowing it to proceed with a
        // (needlessly) shared rate limiter).
        throw new IllegalStateException("No crl available for: " + crawler
                                        + " in pool: " + poolKey);
      }
      return res;
    }
  }
protected CrawlRateLimiter newCrawlRateLimiter(ArchivalUnit au) {
return CrawlRateLimiter.Util.forAu(au);
}
protected String getPoolKey(ArchivalUnit au) {
String pool = au.getFetchRateLimiterKey();
if (pool == null) {
pool = au.getAuId();
}
return pool;
}
void resetCrawlPoolSizes() {
synchronized (runningCrawlersLock) {
for (PoolCrawlers pc : poolMap.values()) {
pc.setMax();
}
}
}
  /**
   * Register a crawler as running: add it to its crawl pool (creating the
   * pool if necessary), record its pool on the crawler, and mark the AU's
   * new content crawl as running where applicable.  Also drops any pending
   * high priority request for the AU, as it is now being serviced.
   */
  protected void addToRunningCrawls(ArchivalUnit au, Crawler crawler) {
    synchronized (runningCrawlersLock) {
      String pool = getPoolKey(au);
      logger.debug3("addToRunningCrawls: " + au + " to: " + pool);
      PoolCrawlers pc = poolMap.get(pool);
      if (pc == null) {
        pc = new PoolCrawlers(pool);
        // A non-null fetch rate limiter key means the pool is shared
        // among multiple AUs.
        if (au.getFetchRateLimiterKey() != null) {
          pc.setShared();
        }
        poolMap.put(pool, pc);
      }
      pc.addCrawler(crawler);
      // It's possible for an AU's crawl pool to change (e.g., if the title
      // DB is updated).  Record the now-current pool on the crawler so we
      // use it when we remove the crawler later.
      crawler.setCrawlPool(pool);
      if (crawler.isWholeAU()) {
        setRunningNCCrawl(au, true);
      }
    }
    synchronized (highPriorityCrawlRequests) {
      highPriorityCrawlRequests.remove(au);
    }
  }
protected void removeFromRunningCrawls(Crawler crawler) {
logger.debug3("removeFromRunningCrawls: " + crawler);
if (crawler != null) {
ArchivalUnit au = crawler.getAu();
synchronized (runningCrawlersLock) {
String pool = crawler.getCrawlPool();
PoolCrawlers pc = poolMap.get(pool);
pc.removeCrawler(crawler);
if (pc.isEmpty()) {
poolMap.remove(pool);
}
if (crawler.isWholeAU()) {
setRunningNCCrawl(au, false);
startOneWait.expire();
}
}
}
}
private void setRunningNCCrawl(ArchivalUnit au, boolean val) {
if (val) {
runningNCCrawls.add(au);
} else {
runningNCCrawls.remove(au);
}
cmStatus.setRunningNCCrawls(new ArrayList(runningNCCrawls));
}
protected boolean isRunningNCCrawl(ArchivalUnit au) {
synchronized (runningCrawlersLock) {
return runningNCCrawls.contains(au);
}
}
  /**
   * Return the CrawlRateLimiter assigned to a running crawler.
   * @return the limiter, or null if the crawler's pool no longer exists
   * @throws RuntimeException if the pool exists but the crawler has no
   * limiter assigned (indicates the crawler was never added to the pool)
   */
  public CrawlRateLimiter getCrawlRateLimiter(Crawler crawler) {
    synchronized (runningCrawlersLock) {
      PoolCrawlers pc = poolMap.get(crawler.getCrawlPool());
      if (pc == null) {
        return null;
      }
      CrawlRateLimiter res = pc.crlMap.get(crawler);
      if (res == null) {
        throw new RuntimeException("No CrawlRateLimiter for: " + crawler);
      }
      return res;
    }
  }
  /**
   * React to an AU being deleted: remove it from the crawl queues, abort
   * any crawl of it in progress, and tell crawl status objects to drop
   * their references to it.
   */
  void auEventDeleted(ArchivalUnit au) {
    removeAuFromQueues(au);
    synchronized(runningCrawlersLock) {
      // Abort every running crawler whose AU is the deleted one.
      for (PoolCrawlers pc : poolMap.values()) {
        for (Crawler crawler : pc.crlMap.keySet()) {
          if (au == crawler.getAu()) {
            crawler.abortCrawl();
          }
        }
      }
    }
    // Notify CrawlerStatus objects to discard any pointer to this AU
    for (CrawlerStatus status : cmStatus.getCrawlerStatusList()) {
      status.auDeleted(au);
    }
  }
// Overridable for testing
protected boolean isInternalAu(ArchivalUnit au) {
return pluginMgr.isInternalAu(au);
}
public RateLimiter getNewContentRateLimiter(ArchivalUnit au) {
if (isInternalAu(au)) {
return pluginRegistryNewContentRateLimiters.getRateLimiter(au);
} else {
return newContentRateLimiters.getRateLimiter(au);
}
}
  /** Set up crawl priority map.
   * Each entry is "auid-regexp,priority"; the split is on the LAST comma
   * so the regexp itself may contain commas.  Malformed entries are
   * logged and skipped.
   * @param patternPairs the configured pairs, possibly null
   * @return map from compiled pattern to priority, or null if input is null
   */
  Map<Pattern,Integer> makeCrawlPriorityAuidMap(Collection<String> patternPairs) {
    if (patternPairs != null) {
      // LinkedMap preserves the configured order of the patterns.
      Map<Pattern,Integer> map = new LinkedMap();
      for (String pair : patternPairs) {
        // Find the last occurrence of comma to avoid regexp quoting
        int pos = pair.lastIndexOf(',');
        if (pos < 0) {
          logger.error("Malformed auid-regexp,priority pair, ignored: " + pair);
          continue;
        }
        String regexp = pair.substring(0, pos);
        String pristr = pair.substring(pos + 1);
        int pri;
        Pattern pat;
        try {
          pri = Integer.parseInt(pristr);
          int flags = Perl5Compiler.READ_ONLY_MASK;
          pat = RegexpUtil.getCompiler().compile(regexp, flags);
          logger.info("Crawl priority " + pri +
                      ", auid pattern: " + pat.getPattern());
          map.put(pat, pri);
        } catch (MalformedPatternException e) {
          // NOTE(review): a NumberFormatException from parseInt would
          // propagate; only bad regexps are skipped here.
          logger.error("Illegal crawl priority pattern, ignored: " + regexp, e);
        }
      }
      return map;
    }
    return null;
  }
/** Set up crawl pool size map. */
Map<String,Integer> makeCrawlPoolSizeMap(Collection<String> pairs) {
if (pairs != null) {
Map<String,Integer> map = new HashMap();
for (String pair : pairs) {
List<String> onePair = StringUtil.breakAt(pair, ",");
if (onePair.size() != 2) {
logger.error("Malformed pool,size pair, ignored: " + pair);
continue;
}
String pool = onePair.get(0);
try {
int size = Integer.parseInt(onePair.get(1));
logger.info("Crawl pool " + pool + ", size " + size);
map.put(pool, size);
} catch (NumberFormatException e) {
logger.error("Illegal crawl pool size, ignored: " + pool + ", "
+ onePair.get(1), e);
}
}
return map;
}
return null;
}
  /**
   * Start a repair crawl for the given URLs, subject to the repair rate
   * limiter and the activity regulator.  The callback is invoked with
   * failure (without throwing) if the rate limiter or regulator refuses;
   * if thread startup fails, all acquired locks are freed and the
   * RuntimeException is rethrown.
   * @param au the AU to repair; must not be null
   * @param urls the URLs to repair; must not be null
   * @param cb callback to signal completion, possibly null
   * @param cookie opaque value passed back to the callback
   * @param lock an existing activity lock to reuse, possibly null
   */
  public void startRepair(ArchivalUnit au, Collection urls,
                          CrawlManager.Callback cb, Object cookie,
                          ActivityRegulator.Lock lock) {
    //XXX check to make sure no other crawls are running and queue if they are
    if (au == null) {
      throw new IllegalArgumentException("Called with null AU");
    }
    if (urls == null) {
      throw new IllegalArgumentException("Called with null URL");
    }
    // check rate limiter before obtaining locks
    RateLimiter limiter = repairRateLimiters.getRateLimiter(au);
    if (!limiter.isEventOk()) {
      logger.debug("Repair aborted due to rate limiter.");
      callCallback(cb, cookie, false, null);
      return;
    }
    // check with regulator and start repair
    Map locks = getRepairLocks(au, urls, lock);
    if (locks.isEmpty()) {
      logger.debug("Repair aborted due to activity lock.");
      callCallback(cb, cookie, false, null);
      return;
    }
    Crawler crawler = null;
    try {
      // If we couldn't lock every requested URL, report overall failure
      // via the wrapper even if the partial repair succeeds.
      if (locks.size() < urls.size()) {
        cb = new FailingCallbackWrapper(cb);
      }
      crawler = makeRepairCrawler(au, au.getCrawlSpec(),
                                  locks.keySet(), percentRepairFromCache);
      CrawlRunner runner =
        new CrawlRunner(crawler, null, cb, cookie, locks.values(), limiter);
      cmStatus.addCrawlStatus(crawler.getStatus());
      addToRunningCrawls(au, crawler);
      new Thread(runner).start();
    } catch (RuntimeException re) {
      // Startup failed: expire every per-URL lock plus the caller's lock,
      // undo bookkeeping, signal failure, then rethrow.
      logger.error("Couldn't start repair crawl thread", re);
      logger.debug("Freeing repair locks...");
      Iterator lockIt = locks.values().iterator();
      while (lockIt.hasNext()) {
        ActivityRegulator.Lock deadLock =
          (ActivityRegulator.Lock)lockIt.next();
        deadLock.expire();
      }
      lock.expire();
      removeFromRunningCrawls(crawler);
      callCallback(cb, cookie, false, null);
      throw re;
    }
  }
  /**
   * Obtain a repair-crawl activity lock for each URL.  If a URL matches
   * the CUS covered by the caller-supplied main lock, that lock is reused
   * (converted to REPAIR_CRAWL activity) instead of acquiring a new one.
   * @param au the AU being repaired
   * @param urlStrs collection of URL strings
   * @param mainLock an existing lock to reuse, possibly null
   * @return map of url -> lock for every URL that could be locked (URLs
   * the regulator refused are simply absent)
   */
  private Map getRepairLocks(ArchivalUnit au, Collection urlStrs,
                             ActivityRegulator.Lock mainLock) {
    Map locks = new HashMap();
    ActivityRegulator ar = getDaemon().getActivityRegulator(au);
    String mainCusUrl = "";
    if ((mainLock!=null) && (mainLock.getCachedUrlSet()!=null)) {
      mainCusUrl = mainLock.getCachedUrlSet().getUrl();
    }
    for (Iterator it = urlStrs.iterator(); it.hasNext();) {
      String url = (String)it.next();
      ActivityRegulator.Lock lock;
      if (url.equals(mainCusUrl)) {
        // Reuse the caller's lock for the URL it already covers.
        mainLock.setNewActivity(ActivityRegulator.REPAIR_CRAWL,
                                repairCrawlExpiration);
        lock = mainLock;
      } else {
        lock = ar.getCusActivityLock(createSingleNodeCachedUrlSet(au, url),
                                     ActivityRegulator.REPAIR_CRAWL,
                                     repairCrawlExpiration);
      }
      if (lock != null) {
        locks.put(url, lock);
        if (logger.isDebug3()) logger.debug3("Locked "+url);
      } else {
        if (logger.isDebug3()) logger.debug3("Couldn't lock "+url);
      }
    }
    return locks;
  }
private static CachedUrlSet createSingleNodeCachedUrlSet(ArchivalUnit au,
String url) {
return au.makeCachedUrlSet(new SingleNodeCachedUrlSetSpec(url));
}
  /**
   * Decide whether a new content crawl of the AU may start now: no crawl
   * of it already running, not an exploded AU, crawl window (if any) open
   * for long enough, and its rate limiter permits.
   * @return true iff a new content crawl may start
   */
  public boolean isEligibleForNewContentCrawl(ArchivalUnit au) {
    if (isRunningNCCrawl(au)) {
      return false;
    }
    if (au instanceof ExplodedArchivalUnit) {
      logger.debug("Can't crawl ExplodedArchivalUnit");
      return false;
    }
    CrawlSpec spec;
    try {
      spec = au.getCrawlSpec();
    } catch (RuntimeException e) {
      // not clear this can ever happen in real use, but some tests force
      // getCrawlSpec() to throw
      logger.error("Couldn't get CrawlSpec: " + au, e);
      return false;
    }
    if (spec != null && !windowOkToStart(spec.getCrawlWindow())) {
      logger.debug3("Not crawlable: crawl window: " + au);
      return false;
    }
    RateLimiter limiter = getNewContentRateLimiter(au);
    if (limiter != null && !limiter.isEventOk()) {
      logger.debug3("Not crawlable: rate limiter: " + au);
      return false;
    }
    return true;
  }
public void startNewContentCrawl(ArchivalUnit au, CrawlManager.Callback cb,
Object cookie, ActivityRegulator.Lock lock) {
startNewContentCrawl(au, 0, cb, cookie, lock);
}
  /**
   * Request a new content crawl of the AU.  In on-demand (ODC) mode the
   * request is enqueued as high priority; otherwise it is handed directly
   * to the thread pool.  Refusals (crawler disabled, request creation
   * failure) are reported through the callback, not thrown.
   * @param au the AU to crawl; must not be null
   * @param priority the request priority
   * @param cb callback to signal completion, possibly null
   * @param cookie opaque value passed back to the callback
   * @param lock an existing activity lock to reuse, possibly null
   */
  public void startNewContentCrawl(ArchivalUnit au, int priority,
                                   CrawlManager.Callback cb,
                                   Object cookie, ActivityRegulator.Lock lock) {
    if (au == null) {
      throw new IllegalArgumentException("Called with null AU");
    }
    if (!crawlerEnabled) {
      logger.warning("Crawler disabled, not crawling: " + au);
      callCallback(cb, cookie, false, null);
      return;
    }
    CrawlReq req;
    try {
      req = new CrawlReq(au, cb, cookie, lock);
      req.setPriority(priority);
    } catch (RuntimeException e) {
      logger.error("Couldn't create CrawlReq: " + au, e);
      callCallback(cb, cookie, false, null);
      return;
    }
    if (paramOdc) {
      enqueueHighPriorityCrawl(req);
    } else {
      handReqToPool(req);
    }
  }
  /**
   * Attempt to start (or queue) the crawl described by the request:
   * re-check eligibility, obtain or refresh the activity lock, build the
   * crawler and runner, then hand the runner to the thread pool.  On any
   * failure the lock is expired, the crawler is removed from the running
   * set, and the callback is signalled with failure.
   */
  void handReqToPool(CrawlReq req) {
    ArchivalUnit au = req.au;
    CrawlManager.Callback cb = req.cb;
    Object cookie = req.cookie;
    ActivityRegulator.Lock lock = req.lock;

    if (!isEligibleForNewContentCrawl(au)) {
      callCallback(cb, cookie, false, null);
      return;
    }
    // Reuse the supplied lock if still valid, else get a fresh one.
    if ((lock==null) || (lock.isExpired())) {
      lock = getNewContentLock(au);
    } else {
      lock.setNewActivity(ActivityRegulator.NEW_CONTENT_CRAWL,
                          contentCrawlExpiration);
    }
    if (lock == null) {
      logger.debug("Not starting new content crawl due to activity lock: "
                   + au);
      callCallback(cb, cookie, false, null);
      return;
    }
    CrawlSpec spec = au.getCrawlSpec();
    Crawler crawler = null;
    CrawlRunner runner = null;
    try {
      crawler = makeNewContentCrawler(au, spec);
      runner = new CrawlRunner(crawler, spec, cb, cookie, SetUtil.set(lock),
                               getNewContentRateLimiter(au),
                               newContentStartRateLimiter);
      // To avoid race, must add to running crawls before starting
      // execution
      addToRunningCrawls(au, crawler);
      if (paramOdc) {
        // Add status first.  execute might not return for a long time, and
        // we're expecting this crawl to be accepted.
        cmStatus.addCrawlStatus(crawler.getStatus());
        execute(runner);
        return;
      } else {
        // Add to status only if successfully queued or started.  (No
        // race here; appearance in status might be delayed.)
        execute(runner);
        cmStatus.addCrawlStatus(crawler.getStatus());
        return;
      }
    } catch (InterruptedException e) {
      if (!isShuttingDown()) {
        // thrown by pool if can't execute (pool & queue full, or pool full
        // and no queue.  In on-demand mode should throw only on shutdown.)
        String crawlerRunner =
          (crawler == null ? "no crawler" : crawler.toString()) + " " +
          (runner == null ? "no runner" : runner.toString());
        if (e.getMessage() != null &&
            e.getMessage().endsWith("Pool is blocked")) {
          logger.warning("Couldn't start/schedule " + au + " crawl: " +
                         e.toString() + " " + crawlerRunner);
        } else {
          logger.warning("Couldn't start/schedule " + au + " crawl" + " " +
                         crawlerRunner, e);
        }
      }
      logger.debug2("Freeing crawl lock");
      lock.expire();
      removeFromRunningCrawls(crawler);
      callCallback(cb, cookie, false, null);
      return;
    } catch (RuntimeException e) {
      // Unexpected failure: same cleanup as above, but always logged.
      String crawlerRunner =
        (crawler == null ? "no crawler" : crawler.toString()) + " " +
        (runner == null ? "no runner" : runner.toString());
      logger.error("Unexpected error attempting to start/schedule " + au +
                   " crawl" + " " + crawlerRunner, e);
      logger.debug2("Freeing crawl lock");
      lock.expire();
      removeFromRunningCrawls(crawler);
      callCallback(cb, cookie, false, null);
      return;
    }
  }
private ActivityRegulator.Lock getNewContentLock(ArchivalUnit au) {
ActivityRegulator ar = getDaemon().getActivityRegulator(au);
return ar.getAuActivityLock(ActivityRegulator.NEW_CONTENT_CRAWL,
contentCrawlExpiration);
}
//method that calls the callback and catches any exception
private static void callCallback(CrawlManager.Callback cb, Object cookie,
boolean successful, CrawlerStatus status) {
if (cb != null) {
try {
cb.signalCrawlAttemptCompleted(successful, cookie, status);
} catch (Exception e) {
logger.error("Crawl callback threw", e);
}
}
}
protected Crawler makeNewContentCrawler(ArchivalUnit au, CrawlSpec spec) {
//check CrawlSpec if it is Oai Type then create OaiCrawler Instead of NewContentCrawler
if (spec instanceof OaiCrawlSpec) {
logger.debug("Creating OaiCrawler for " + au);
OaiCrawler oc = new OaiCrawler(au, spec, AuUtil.getAuState(au));
oc.setCrawlManager(this);
return oc;
} else {
logger.debug("Creating NewContentCrawler for " + au);
NewContentCrawler nc =
new NewContentCrawler(au, spec, AuUtil.getAuState(au));
nc.setCrawlManager(this);
return nc;
}
}
  /**
   * Create a RepairCrawler for the given URLs.
   * @param au the AU to repair
   * @param spec the AU's crawl spec
   * @param repairUrls the URLs to repair
   * @param percentRepairFromCache fraction of repairs to attempt from cache
   */
  protected Crawler makeRepairCrawler(ArchivalUnit au,
                                      CrawlSpec spec,
                                      Collection repairUrls,
                                      float percentRepairFromCache) {
    RepairCrawler rc = new RepairCrawler(au, spec, AuUtil.getAuState(au),
                                         repairUrls, percentRepairFromCache);
    rc.setCrawlManager(this);
    return rc;
  }
static String makeThreadName(Crawler crawler) {
return AuUtil.getThreadNameFor(getThreadNamePrefix(crawler),
crawler.getAu());
}
static String getThreadNamePrefix(Crawler crawler) {
return crawler.getType().toString() + " Crawl";
}
  // Monotonic counter used to stamp CrawlRunners with their creation order.
  private static int createIndex = 0;
  /**
   * Runnable that executes one crawl in a pool thread: charges the AU's
   * rate limiter, optionally staggers starts through a shared start rate
   * limiter, runs the crawl, then performs all cleanup (locks, running
   * set, status, callback) in a finally block.
   */
  public class CrawlRunner extends LockssRunnable {
    private Object cookie;
    private Crawler crawler;
    private CrawlManager.Callback cb;
    // Activity locks to expire when the crawl finishes.
    private Collection locks;
    // Per-AU crawl rate limiter; charged before the crawl starts.
    private RateLimiter auRateLimiter;
    // Global limiter that spaces out crawl starts; may be null.
    private RateLimiter startRateLimiter;
    private CrawlSpec spec;
    // Queue ordering key; negative for registry AUs so they sort first.
    private int sortOrder;

    private CrawlRunner(Crawler crawler, CrawlSpec spec,
                        CrawlManager.Callback cb,
                        Object cookie, Collection locks,
                        RateLimiter auRateLimiter) {
      this(crawler, spec, cb, cookie, locks, auRateLimiter, null);
    }

    private CrawlRunner(Crawler crawler, CrawlSpec spec,
                        CrawlManager.Callback cb,
                        Object cookie, Collection locks,
                        RateLimiter auRateLimiter,
                        RateLimiter startRateLimiter) {
      super(makeThreadName(crawler));
      this.cb = cb;
      this.cookie = cookie;
      this.crawler = crawler;
      this.locks = locks;
      this.auRateLimiter = auRateLimiter;
      this.startRateLimiter = startRateLimiter;
      this.spec = spec;
      // queue in order created
      this.sortOrder = ++createIndex;
      if (crawler.getAu() instanceof RegistryArchivalUnit) {
        // except for registry AUs, which always come first
        sortOrder = -sortOrder;
      }
    }

    public String toString() {
      return "[CrawlRunner: " + crawler.getAu() + "]";
    }

    public Crawler getCrawler() {
      return crawler;
    }

    public int getSortOrder() {
      return sortOrder;
    }

    public void lockssRun() {
      //pull out of thread
      boolean crawlSuccessful = false;
      if (logger.isDebug3()) logger.debug3("Runner started");

      try {
        if (!crawlerEnabled) {
          // Crawler was disabled after this runner was queued; mark the
          // crawl aborted and fall through to cleanup.
          crawler.getStatus().setCrawlStatus(Crawler.STATUS_ABORTED,
                                             "Crawler disabled");
          nowRunning();
          // exit immediately
        } else {
          setPriority(PRIORITY_PARAM_CRAWLER, PRIORITY_DEFAULT_CRAWLER);
          crawler.setWatchdog(this);
          startWDog(WDOG_PARAM_CRAWLER, WDOG_DEFAULT_CRAWLER);
          // don't record event if crawl is going to abort immediately
          if (spec == null || spec.inCrawlWindow()) {
            if (auRateLimiter != null) {
              auRateLimiter.event();
            }
          }
          nowRunning();

          if (startRateLimiter != null) {
            // Use RateLimiter to ensure at least a small amount of time
            // between crawl starts, so they don't start out doing their
            // fetches in synch.  This imposes an arbitrary ordering on
            // crawl threads that are ready to start.
            synchronized (startRateLimiter) {
              startRateLimiter.waitUntilEventOk();
              instrumentBeforeStartRateLimiterEvent(crawler);
              startRateLimiter.event();
              if (logger.isDebug3()) {
                logger.debug3("Proceeding from start rate limiter");
              }
            }
          }
          crawlSuccessful = crawler.doCrawl();

          if (!crawlSuccessful && spec != null && !spec.inCrawlWindow()) {
            // If aborted due to crawl window, undo the charge against its
            // rate limiter so it can start again when window opens
            if (auRateLimiter != null) {
              auRateLimiter.unevent();
            }
          }
        }
      } catch (InterruptedException ignore) {
        // no action
      } finally {
        // free all locks, regardless of exceptions
        if (locks != null) {
          try {
            for (Iterator lockIt = locks.iterator(); lockIt.hasNext(); ) {
              ActivityRegulator.Lock lock =
                (ActivityRegulator.Lock)lockIt.next();
              lock.expire();
            }
          } catch (RuntimeException e) {
            logger.warning("Couldn't free locks", e);
          }
        }
        removeFromRunningCrawls(crawler);
        cmStatus.incrFinished(crawlSuccessful);
        CrawlerStatus cs = crawler.getStatus();
        cmStatus.touchCrawlStatus(cs);
        signalAuEvent(crawler, cs);
        // must call callback before sealing counters.  V3Poller relies
        // on fetched URL list
        callCallback(cb, cookie, crawlSuccessful, cs);
        if (cs != null) cs.sealCounters();
        setThreadName(getThreadNamePrefix(crawler) + ": idle");
      }
    }
  }
  /**
   * Build a ChangeInfo summarizing what the crawl fetched (URL count,
   * per-MIME-type counts, crawl vs. repair) and broadcast it to all
   * registered AuEventHandlers via the plugin manager.
   */
  private void signalAuEvent(Crawler crawler, CrawlerStatus cs) {
    final ArchivalUnit au = crawler.getAu();
    final AuEventHandler.ChangeInfo chInfo = new AuEventHandler.ChangeInfo();
    Collection<String> mimeTypes = cs.getMimeTypes();
    if (mimeTypes != null) {
      Map<String,Integer> mimeCounts = new HashMap<String,Integer>();
      for (String mimeType : mimeTypes) {
        mimeCounts.put(mimeType, cs.getMimeTypeCtr(mimeType).getCount());
      }
      chInfo.setMimeCounts(mimeCounts);
    }
    int num = cs.getNumFetched();
    chInfo.setNumUrls(num);
    // Whole-AU crawls report a Crawl event; partial crawls report a
    // Repair event and include the explicit URL list.
    if (crawler.isWholeAU()) {
      chInfo.setType(AuEventHandler.ChangeInfo.Type.Crawl);
    } else {
      chInfo.setType(AuEventHandler.ChangeInfo.Type.Repair);
      chInfo.setUrls(cs.getUrlsFetched());
    }
    chInfo.setAu(au);
    chInfo.setComplete(!cs.isCrawlError());
    pluginMgr.applyAuEvent(new PluginManager.AuEventClosure() {
        public void execute(AuEventHandler hand) {
          hand.auContentChanged(AuEvent.ContentChanged,
                                au, chInfo);
        }
      });
  }
  // For testing only.  See TestCrawlManagerImpl
  // Hook invoked just before the start rate limiter is charged; no-op in
  // production.
  protected void instrumentBeforeStartRateLimiterEvent(Crawler crawler) {
  }
  // Crawl starter thread.
  private CrawlStarter crawlStarter = null;
  // True while the crawl starter is (supposed to be) running.
  private boolean isCrawlStarterEnabled = false;
public void enableCrawlStarter() {
if (crawlStarter != null) {
logger.debug("Crawl starter already running; stopping old one first");
disableCrawlStarter();
}
if (paramStartCrawlsInterval > 0) {
logger.info("Starting crawl starter");
crawlStarter = new CrawlStarter();
new Thread(crawlStarter).start();
isCrawlStarterEnabled = true;
} else {
logger.info("Crawl starter not enabled");
}
}
public void disableCrawlStarter() {
if (crawlStarter != null) {
logger.info("Stopping crawl starter");
crawlStarter.stopCrawlStarter();
crawlStarter.waitExited(Deadline.in(Constants.SECOND));
crawlStarter = null;
}
isCrawlStarterEnabled = false;
}
public boolean isCrawlStarterEnabled() {
return isCrawlStarterEnabled;
}
/** Orders CrawlRunners according to the sort order they specify */
static class CrawlQueueComparator implements Comparator {
public int compare(Object a, Object b) {
CrawlManagerImpl.CrawlRunner ra = (CrawlManagerImpl.CrawlRunner)a;
CrawlManagerImpl.CrawlRunner rb = (CrawlManagerImpl.CrawlRunner)b;
return ra.getSortOrder() - rb.getSortOrder();
}
}
  /**
   * Background thread that periodically starts crawls.  In on-demand
   * (ODC) mode it starts one crawl at a time, driven by the startOneWait
   * deadline; otherwise it starts a batch every paramStartCrawlsInterval.
   * Runs until stopCrawlStarter() clears goOn.
   */
  private class CrawlStarter extends LockssRunnable {
    // Cleared by stopCrawlStarter() to end the loop; volatile so the
    // running thread sees the change.
    private volatile boolean goOn = true;

    private CrawlStarter() {
      super("CrawlStarter");
    }

    public void lockssRun() {
      setPriority(PRIORITY_PARAM_CRAWLER, PRIORITY_DEFAULT_CRAWLER);
      // Crawl start interval is configurable, so watchdog timeout would
      // have to be also.  Crawl starter is so simple; not sure it really
      // needs a watchdog.
      //       startWDog(WDOG_PARAM_CRAWL_STARTER, WDOG_DEFAULT_CRAWL_STARTER);
      triggerWDogOnExit(true);

      // Initial delay before the first crawl start.
      if (goOn) {
        try {
          logger.debug("Waiting until AUs started");
          if (paramOdc) {
            startOneWait.expireIn(paramStartCrawlsInitialDelay);
            cmStatus.setNextCrawlStarter(startOneWait);
            startOneWait.sleep();
          } else {
            waitUntilAusStarted();
            logger.debug3("AUs started");
            Deadline initial = Deadline.in(paramStartCrawlsInitialDelay);
            cmStatus.setNextCrawlStarter(initial);
            initial.sleep();
          }
        } catch (InterruptedException e) {
          // just wakeup, check for exit and start running
        }
      }
      // Main loop: start crawls until told to stop.
      while (goOn) {
        // 	pokeWDog();
        try {
          if (paramOdc) {
            startOneCrawl();
          } else {
            startSomeCrawls();
            Deadline timer = Deadline.in(paramStartCrawlsInterval);
            cmStatus.setNextCrawlStarter(timer);
            if (goOn) {
              try {
                timer.sleep();
              } catch (InterruptedException e) {
                // just wakeup and check for exit
              }
            }
            cmStatus.setNextCrawlStarter(null);
          }
        } catch (InterruptedException e) {
          // just wakeup and check for exit
        }
      }
      if (!goOn) {
        triggerWDogOnExit(false);
      }
    }

    // Request the loop to exit and wake the thread from any sleep.
    private void stopCrawlStarter() {
      goOn = false;
      interruptThread();
    }
  }
  // Separate so can override for testing
  /** Block until the daemon reports that AUs have been started. */
  void waitUntilAusStarted() throws InterruptedException {
    getDaemon().waitUntilAusStarted();
  }
// Separate so can override for testing
boolean areAusStarted() {
// may be called before service is started (from setConfig())
return pluginMgr != null && pluginMgr.areAusStarted();
}
  // Sentinel key for requests without a shared fetch rate limiter.
  static Object UNSHARED_RATE_KEY = new Object();

  // Crawl queue tuning parameters, refreshed by setConfig().
  long paramRebuildCrawlQueueInterval = DEFAULT_REBUILD_CRAWL_QUEUE_INTERVAL;
  long paramQueueRecalcAfterNewAu = DEFAULT_QUEUE_RECALC_AFTER_NEW_AU;
  long paramQueueEmptySleep = DEFAULT_QUEUE_EMPTY_SLEEP;
  int paramUnsharedQueueMax = DEFAULT_UNSHARED_QUEUE_MAX;
  int paramSharedQueueMax = DEFAULT_SHARED_QUEUE_MAX;
  int paramFavorUnsharedRateThreads = DEFAULT_FAVOR_UNSHARED_RATE_THREADS;
  CrawlOrder paramCrawlOrder = DEFAULT_CRAWL_ORDER;

  // Deadlines controlling queue rebuilds and the on-demand starter;
  // initialized already expired so the first check triggers work.
  Deadline timeToRebuildCrawlQueue = Deadline.in(0);
  Deadline startOneWait = Deadline.in(0);

  // Pending on-demand requests, keyed by AU; synchronized on itself.
  Map<ArchivalUnit,CrawlReq> highPriorityCrawlRequests = new ListOrderedMap();

  // Comparator ordering CrawlReqs for the built queues.
  Comparator CPC = new CrawlPriorityComparator();

  Object queueLock = new Object();	// lock for sharedRateReqs and
					// unsharedRateReqs
  // Built queues: per-rate-key bounded sets for shared-rate AUs, one
  // bounded set for the rest.
  MultiCrawlPriorityMap sharedRateReqs =
    new MultiCrawlPriorityMap(paramSharedQueueMax);
  BoundedTreeSet unsharedRateReqs =
    new BoundedTreeSet(paramUnsharedQueueMax, CPC);
  /**
   * MultiValueMap whose per-key value collections are BoundedTreeSets
   * ordered by crawl priority; used for the shared-rate request queue.
   */
  class MultiCrawlPriorityMap extends MultiValueMap {
    MultiCrawlPriorityMap(final int maxAus) {
      // Factory creates a bounded, priority-ordered set per rate key.
      super(new HashMap(), new org.apache.commons.collections.Factory() {
          public Object create() {
            return new BoundedTreeSet(maxAus, CPC);
          }});
    }
    // Resize every existing per-key set (after a config change).
    public void setTreeSetSize(int maxAus) {
      for (Map.Entry ent : (Collection<Map.Entry>)entrySet()) {
        BoundedTreeSet ts = (BoundedTreeSet)ent.getValue();
        ts.setMaxSize(maxAus);
      }
    }
  }
boolean windowOkToStart(CrawlWindow window) {
if (window == null) return true;
if (!window.canCrawl()) return false;
Date soon = new Date(TimeBase.nowMs() + paramMinWindowOpenFor);
return window.canCrawl(soon);
}
  // Force queues to be rebuilt.  Overkill, but easy and this hardly
  // ever happens
  /** Remove any pending crawl requests for a (typically deleted) AU by
   * dropping its high priority request and clearing the built queues so
   * they get rebuilt without it. */
  void removeAuFromQueues(ArchivalUnit au) {
    synchronized (highPriorityCrawlRequests) {
      highPriorityCrawlRequests.remove(au);
    }
    synchronized (queueLock) {
      unsharedRateReqs.clear();
      sharedRateReqs.clear();
    }
  }
CrawlReq nextReq() throws InterruptedException {
boolean rebuilt = false;
if (timeToRebuildCrawlQueue.expired()) {
rebuildCrawlQueue();
rebuilt = true;
}
CrawlReq res = nextReqFromBuiltQueue();
if (res != null) {
return res;
}
if (!rebuilt) {
rebuildCrawlQueue();
}
return nextReqFromBuiltQueue();
}
  /**
   * Pick the best request from the already-built queues.  Shared-rate
   * pools with free capacity each contribute their top request; the top
   * unshared request competes when there is room for it (or nothing else
   * is eligible, or it's high priority).  The overall winner is removed
   * from its queue and returned; null if nothing is eligible.
   */
  CrawlReq nextReqFromBuiltQueue() {
    // Snapshot of new-content crawl counts per shared rate key.
    Bag runKeys = copySharedRunKeys();
    synchronized (queueLock) {
      if (logger.isDebug3()) {
        logger.debug3("nextReqFromBuiltQueue(), " +
                      sharedRateReqs.size() + " shared, " +
                      unsharedRateReqs.size() + " unshared, " +
                      " runKeys: " + runKeys);
      }
      // preferentially start those with shared rate limiters, but give
      // unshared a minimum number of threads
      // Bounded to 1: only the single best candidate survives.
      BoundedTreeSet finalSort = new BoundedTreeSet(1, CPC);
      for (Iterator iter = sharedRateReqs.entrySet().iterator();
           iter.hasNext();) {
        Map.Entry<String,TreeSet> ent = (Map.Entry)iter.next();
        String rateKey = ent.getKey();
        int poolSize = getCrawlPoolSize(rateKey);
        if (logger.isDebug3()) {
          logger.debug3("Rate key: " + rateKey + ", pool: " + poolSize +
                        ", current: " + runKeys.getCount(rateKey));
        }
        // Skip pools already running at capacity.
        if (runKeys.getCount(rateKey) >= poolSize) {
          continue;
        }
        CrawlReq req = (CrawlReq)ent.getValue().first();
        if (logger.isDebug3()) logger.debug3("Adding to final sort: " + req);
        finalSort.add(req);
      }
      if (!unsharedRateReqs.isEmpty() &&
          ( finalSort.isEmpty() ||
            runKeys.size() >= (paramMaxPoolSize -
                               paramFavorUnsharedRateThreads) ||
            ((CrawlReq)unsharedRateReqs.first()).isHiPri())) {
        CrawlReq req = (CrawlReq)unsharedRateReqs.first();
        if (logger.isDebug3()) logger.debug3("Adding to final sort: " + req);
        finalSort.add(req);
      }
      if (finalSort.isEmpty()) {
        if (logger.isDebug3()) {
          logger.debug3("nextReqFromBuiltQueue(): null, " +
                        sharedRateReqs.size() + " shared");
        }
        return null;
      }
      CrawlReq bestReq = (CrawlReq)finalSort.first();
      // Dequeue the winner from whichever queue it came from.
      if (bestReq.rateKey != null) {
        sharedRateReqs.remove(bestReq.rateKey, bestReq);
      } else {
        unsharedRateReqs.remove(bestReq);
      }
      logger.debug3("nextReqFromBuiltQueue: " + bestReq);
      return bestReq;
    }
  }
int getCrawlPoolSize(String key) {
if (concurrentCrawlLimitMap != null
&& concurrentCrawlLimitMap.containsKey(key)) {
return concurrentCrawlLimitMap.get(key);
} else {
return 1;
}
}
  /** Snapshot of new-content crawl counts for shared pools only. */
  Bag copySharedRunKeys() {
    return copyRunKeys(true);
  }
  /** Snapshot of new-content crawl counts for all pools. */
  Bag copyRunKeys() {
    return copyRunKeys(false);
  }
  /**
   * Build a Bag mapping each pool key to the number of new content crawls
   * currently running in that pool (summed over the pool's rate limiters).
   * @param sharedOnly if true, include only shared pools
   */
  Bag copyRunKeys(boolean sharedOnly) {
    synchronized (runningCrawlersLock) {
      Bag res = new HashBag();
      for (Map.Entry<String,PoolCrawlers> ent : poolMap.entrySet()) {
        PoolCrawlers pc = ent.getValue();
        if (sharedOnly && !pc.isShared()) {
          continue;
        }
        int sum = 0;
        for (CrawlRateLimiter crl : pc.crls) {
          sum += crl.getNewContentCount();
        }
        res.add(ent.getKey(), sum);
      }
      return res;
    }
  }
  /**
   * Return all pending crawl requests (shared and unshared queues merged)
   * in priority order; used for status display.
   */
  public Collection<CrawlReq> getPendingQueue() {
    Collection runKeys = copyRunKeys();
    TreeSet<CrawlReq> finalSort = new TreeSet(CPC);
    synchronized (queueLock) {
      for (Iterator iter = sharedRateReqs.entrySet().iterator();
           iter.hasNext();) {
        Map.Entry ent = (Map.Entry)iter.next();
        Object rateKey = ent.getKey();
        // NOTE(review): placeholder -- requests in pools that are already
        // running are not yet distinguished in the result.
        if (runKeys.contains(rateKey)) {
          // mark it somehow
        }
        finalSort.addAll((TreeSet)ent.getValue());
      }
      finalSort.addAll(unsharedRateReqs);
    }
    return finalSort;
  }
/**
 * Record a request to crawl an AU at high priority, then expire both
 * timers so the queue is rebuilt and the crawl considered promptly.
 */
void enqueueHighPriorityCrawl(CrawlReq req) {
  logger.debug("enqueueHighPriorityCrawl(" + req.au + ")");
  synchronized (highPriorityCrawlRequests) {
    highPriorityCrawlRequests.put(req.au, req);
  }
  // Force an immediate queue rebuild and wake the crawl starter.
  timeToRebuildCrawlQueue.expire();
  startOneWait.expire();
}
/**
 * Arrange for the crawl queue to be rebuilt soon (after
 * paramQueueRecalcAfterNewAu).  Timers that have already expired are
 * left alone so an imminent rebuild isn't pushed further out.
 */
public void rebuildQueueSoon() {
  // Don't push forward if already expired.
  if (!timeToRebuildCrawlQueue.expired()) {
    timeToRebuildCrawlQueue.expireIn(paramQueueRecalcAfterNewAu);
  }
  if (!startOneWait.expired()) {
    startOneWait.expireIn(paramQueueRecalcAfterNewAu);
  }
}
/** Rebuild the crawl queue now, first rescheduling the next periodic
 * rebuild, and log how long the rebuild took. */
void rebuildCrawlQueue() {
  timeToRebuildCrawlQueue.expireIn(paramRebuildCrawlQueueInterval);
  long startTime = TimeBase.nowMs();
  rebuildCrawlQueue0();
  logger.debug("rebuildCrawlQueue(): "+
               (TimeBase.nowMs() - startTime)+"ms");
}
/**
 * Recompute the queues of AUs awaiting a new-content crawl.  Scans all
 * AUs (or only the high-priority ones if AUs haven't started), filters
 * by crawl-worthiness and eligibility, and partitions the requests by
 * fetch rate key: sharedRateReqs for AUs in a shared crawl pool,
 * unsharedRateReqs otherwise.  Also updates the status counters.
 * Holds queueLock throughout; briefly takes the
 * highPriorityCrawlRequests lock inside it.
 */
void rebuildCrawlQueue0() {
  int ausWantCrawl = 0;      // AUs that want a crawl
  int ausEligibleCrawl = 0;  // of those, eligible and above min priority
  synchronized (queueLock) {
    unsharedRateReqs.clear();
    sharedRateReqs.clear();
    for (ArchivalUnit au : (areAusStarted()
                            ? pluginMgr.getAllAus()
                            : getHighPriorityAus())) {
      try {
        CrawlReq req;
        synchronized (highPriorityCrawlRequests) {
          req = highPriorityCrawlRequests.get(au);
        }
        // A pending high-priority request forces consideration even
        // when shouldCrawlForNewContent() would say no.
        if ((req != null || shouldCrawlForNewContent(au))) {
          ausWantCrawl++;
          if (isEligibleForNewContentCrawl(au)) {
            if (req == null) {
              req = new CrawlReq(au);
              setReqPriority(req);
            }
            if (req.priority > MIN_CRAWL_PRIORITY) {
              ausEligibleCrawl++;
              String rateKey = au.getFetchRateLimiterKey();
              if (rateKey == null) {
                unsharedRateReqs.add(req);
                if (logger.isDebug3()) {
                  logger.debug3("Added to queue: null, " + req);
                }
              } else {
                sharedRateReqs.put(rateKey, req);
                if (logger.isDebug3()) {
                  logger.debug3("Added to pool queue: " + rateKey +
                                ", " + req);
                }
              }
            }
          }
        }
      } catch (RuntimeException e) {
        logger.warning("Checking for crawlworthiness: " + au.getName(), e);
        // ignore AU if it caused an error
      }
    }
  }
  cmStatus.setWaitingCount(ausWantCrawl);
  cmStatus.setEligibleCount(ausEligibleCrawl);
}
/** Return a snapshot of the AUs with pending high-priority crawl
 * requests, copied under the highPriorityCrawlRequests lock. */
List<ArchivalUnit> getHighPriorityAus() {
  synchronized (highPriorityCrawlRequests) {
    return new ArrayList(highPriorityCrawlRequests.keySet());
  }
}
/**
 * Assign a priority to the request if its AU's auid matches one of the
 * configured crawl-priority patterns.  The first matching pattern wins;
 * with no map or no match the request's priority is left unchanged.
 */
void setReqPriority(CrawlReq req) {
  if (crawlPriorityAuidMap == null) {
    return;
  }
  String auid = req.getAu().getAuId();
  Perl5Matcher matcher = RegexpUtil.getMatcher();
  for (Map.Entry<Pattern,Integer> ent : crawlPriorityAuidMap.entrySet()) {
    if (!matcher.contains(auid, ent.getKey())) {
      continue;
    }
    if (logger.isDebug3()) {
      logger.debug3("Crawl priority " + ent.getValue() +
                    ": " + req.getAu().getName());
    }
    req.setPriority(ent.getValue());
    return;
  }
}
/**
 * Orders CrawlReqs for the crawl queue.  Sort keys, most significant
 * first: explicit priority (higher first, via negation), registry AUs
 * before ordinary AUs, previous crawl result rank (see
 * previousResultOrder), then either AU creation date or last crawl
 * attempt/time depending on paramCrawlOrder.  The identityHashCode
 * tiebreaker makes the order total so distinct requests never compare
 * equal in a TreeSet.
 */
class CrawlPriorityComparator implements Comparator {
  // Comparator should not reference NodeManager, etc., else all sorted
  // collection insertions, etc. must be protected against
  // NoSuchAuException
  public int compare(Object o1, Object o2) {
    CrawlReq r1 = (CrawlReq)o1;
    CrawlReq r2 = (CrawlReq)o2;
    ArchivalUnit au1 = r1.au;
    ArchivalUnit au2 = r2.au;
    AuState aus1 = r1.aus;
    AuState aus2 = r2.aus;
    CompareToBuilder ctb =
      new CompareToBuilder()
      .append(-r1.priority, -r2.priority)
      // false sorts before true, so registry AUs come first
      .append(!(au1 instanceof RegistryArchivalUnit),
              !(au2 instanceof RegistryArchivalUnit))
      .append(previousResultOrder(aus1.getLastCrawlResult()),
              previousResultOrder(aus2.getLastCrawlResult()));
    switch (paramCrawlOrder) {
    case CreationDate:
      ctb.append(aus1.getAuCreationTime(), aus2.getAuCreationTime());
      ctb.append(au1.getAuId(), au2.getAuId());
      break;
    case CrawlDate:
    default:
      ctb
        .append(aus1.getLastCrawlAttempt(), aus2.getLastCrawlAttempt())
        .append(aus1.getLastCrawlTime(), aus2.getLastCrawlTime())
        // .append(au1.toString(), au2.toString())
        .append(System.identityHashCode(r1), System.identityHashCode(r2));
      break;
    }
    return ctb.toComparison();
  }
  /**
   * Map a previous crawl result code to a sort rank; lower ranks sort
   * (and thus crawl) sooner.  Window-closed results rank first; a crawl
   * that was running at a crash ranks next only if restart-after-crash
   * is enabled.
   */
  int previousResultOrder(int crawlResult) {
    final int DEFAULT = 2;
    switch (crawlResult) {
    case Crawler.STATUS_WINDOW_CLOSED:
      return 0;
    case Crawler.STATUS_RUNNING_AT_CRASH:
      return paramRestartAfterCrash ? 1 : DEFAULT;
    default: return DEFAULT;
    }
  }
}
/**
 * Try to start one crawl from the queue.  If none can be started, wait
 * on the starter timer (up to paramQueueEmptySleep) before returning so
 * the caller's loop doesn't spin.
 * @return true if a crawl was started, false if this invocation waited
 */
boolean startOneCrawl() throws InterruptedException {
  startOneWait.expireIn(paramQueueEmptySleep);
  CrawlReq req = crawlerEnabled ? nextReq() : null;
  if (req != null) {
    startCrawl(req);
    return true;
  }
  cmStatus.setNextCrawlStarter(startOneWait);
  while (!startOneWait.expired()) {
    try {
      startOneWait.sleep();
    } catch (InterruptedException e) {
      // just wakeup and check
    }
  }
  return false;
}
// Each invocation of startSomeCrawls() tries to fill queue with AUs that
// need a crawl. The same random iterator is used across multiple
// invocations to ensure we examine all AUs before starting over with a
// new random order.
private Iterator crawlStartIter = null;
/**
 * Fill the crawl thread-pool queue (up to paramPoolQueueSize) with
 * crawls.  Registry AUs are always considered first; remaining slots
 * are filled from the persistent randomized iterator above.
 */
void startSomeCrawls() {
  if (crawlerEnabled && (poolQueue != null)) {
    if (poolQueue.size() < paramPoolQueueSize) {
      logger.debug("Checking for AUs that need crawls");
      // get a new iterator if don't have one or if have exhausted
      // previous one
      if (crawlStartIter == null || !crawlStartIter.hasNext()) {
        crawlStartIter = pluginMgr.getRandomizedAus().iterator();
      }
      // Plugin registry AUs get first claim on queue slots.
      for (Iterator iter = pluginMgr.getAllRegistryAus().iterator();
           iter.hasNext() && poolQueue.size() < paramPoolQueueSize; ) {
        ArchivalUnit au = (ArchivalUnit)iter.next();
        possiblyStartCrawl(au);
      }
      // Then ordinary AUs; internal AUs are skipped here.
      while (crawlStartIter.hasNext() &&
             poolQueue.size() < paramPoolQueueSize) {
        ArchivalUnit au = (ArchivalUnit)crawlStartIter.next();
        if (!isInternalAu(au)) {
          possiblyStartCrawl(au);
        }
      }
    }
  }
}
/**
 * Start a new-content crawl of the AU if it appears to need one,
 * tolerating AUs that disappear while being checked.
 */
void possiblyStartCrawl(ArchivalUnit au) {
  try {
    if (!shouldCrawlForNewContent(au)) {
      return;
    }
    startCrawl(au);
  } catch (IllegalArgumentException e) {
    // XXX When NoSuchAuException is created, this should catch that
    logger.warning("AU disappeared: " + au.getName());
  }
}
/** Kick off a new-content crawl of the AU with no completion callback. */
void startCrawl(ArchivalUnit au) {
  CrawlManager.Callback rc = null;
  // Activity lock prevents AUs with pending crawls from being
  // queued twice. If ActivityRegulator goes away some other
  // mechanism will be needed.
  startNewContentCrawl(au, rc, null, null);
}
/**
 * Hand the queued request to the crawl thread pool, logging (not
 * propagating) any runtime failure.
 */
void startCrawl(CrawlReq req) {
  ArchivalUnit au = req.au;
  try {
    // doesn't return until thread available for next request
    handReqToPool(req);
  } catch (RuntimeException e) {
    logger.warning("Starting crawl: " + au.getName(), e);
  }
}
/**
 * Ask the AU whether it wants a new-content crawl, given its current
 * state.  An AU that has disappeared (currently surfacing as
 * IllegalArgumentException) is treated as not wanting a crawl.
 */
boolean shouldCrawlForNewContent(ArchivalUnit au) {
  try {
    boolean wants = au.shouldCrawlForNewContent(AuUtil.getAuState(au));
    if (logger.isDebug3()) {
      logger.debug3("Should " + (wants ? "" : "not ") + "crawl " + au);
    }
    return wants;
  } catch (IllegalArgumentException e) {
    // XXX When NoSuchAuException is created, this should catch that
    logger.warning("AU disappeared: " + au.getName());
    return false;
  }
}
/**
 * Callback wrapper that unconditionally reports failure to the wrapped
 * callback, regardless of the actual success flag, and drops the
 * crawler status.
 */
private static class FailingCallbackWrapper
  implements CrawlManager.Callback {
  CrawlManager.Callback cb;
  public FailingCallbackWrapper(CrawlManager.Callback cb) {
    this.cb = cb;
  }
  public void signalCrawlAttemptCompleted(boolean success,
                                          Object cookie,
                                          CrawlerStatus status) {
    // Always propagate false; the status argument is not forwarded.
    callCallback(cb, cookie, false, null);
  }
}
/** Return the StatusSource */
public StatusSource getStatusSource() {
  // This manager is its own status source.
  return this;
}
//CrawlManager.StatusSource methods
/** Return the object holding crawl manager status for the UI. */
public CrawlManagerStatus getStatus() {
  return cmStatus;
}
}
|
/*
* $Id: ArchivalUnitStatus.java,v 1.33 2005-07-25 01:20:30 tlipkis Exp $
*/
package org.lockss.state;
import java.util.*;
import java.net.MalformedURLException;
import org.lockss.config.Configuration;
import org.lockss.daemon.*;
import org.lockss.daemon.status.*;
import org.lockss.plugin.*;
import org.lockss.util.*;
import org.lockss.app.*;
import org.lockss.poller.*;
import org.lockss.protocol.*;
import org.lockss.repository.*;
import org.lockss.servlet.LockssServlet;
/**
* Collect and report the status of the ArchivalUnits
*/
public class ArchivalUnitStatus
extends BaseLockssDaemonManager implements ConfigurableManager {
public static final String PREFIX = Configuration.PREFIX + "auStatus.";
/**
 * The default maximum number of nodes to display in a single page of the ui.
 */
public static final String PARAM_MAX_NODES_TO_DISPLAY =
  PREFIX + "nodesPerPage";
static final int DEFAULT_MAX_NODES_TO_DISPLAY = 100;
/**
 * Node URLs are links to cached content page if true
 */
public static final String PARAM_CONTENT_IS_LINK =
  PREFIX + "contentUrlIsLink";
static final boolean DEFAULT_CONTENT_IS_LINK = true;
// Names of the status tables registered by this manager.
public static final String SERVICE_STATUS_TABLE_NAME =
  "ArchivalUnitStatusTable";
public static final String AUIDS_TABLE_NAME = "AuIds";
public static final String AU_STATUS_TABLE_NAME = "ArchivalUnitTable";
public static final String PEERS_VOTE_TABLE_NAME = "PeerVoteSummary";
public static final String PEERS_REPAIR_TABLE_NAME = "PeerRepair";
// Placeholder table cell ("-") that orders as -1.
static final OrderedObject DASH = new OrderedObject("-", new Long(-1));
private static Logger logger = Logger.getLogger("AuStatus");
// Current values of the config params above; updated in setConfig().
private static int defaultNumRows = DEFAULT_MAX_NODES_TO_DISPLAY;
private static boolean isContentIsLink = DEFAULT_CONTENT_IS_LINK;
/** Register this manager's five status tables with the status service. */
public void startService() {
  super.startService();
  StatusService statusServ = theDaemon.getStatusService();
  statusServ.registerStatusAccessor(SERVICE_STATUS_TABLE_NAME,
                                    new AuSummary(theDaemon));
  statusServ.registerStatusAccessor(AUIDS_TABLE_NAME,
                                    new AuIds(theDaemon));
  statusServ.registerStatusAccessor(AU_STATUS_TABLE_NAME,
                                    new AuStatus(theDaemon));
  statusServ.registerStatusAccessor(PEERS_VOTE_TABLE_NAME,
                                    new PeerVoteSummary(theDaemon));
  statusServ.registerStatusAccessor(PEERS_REPAIR_TABLE_NAME,
                                    new PeerRepair(theDaemon));
  logger.debug2("Status accessors registered.");
}
/**
 * Unregister every status accessor registered by {@link #startService}.
 * Previously the AuIds table was registered but never unregistered,
 * leaving a stale accessor behind after the service stopped.
 */
public void stopService() {
  // unregister our status accessors
  StatusService statusServ = theDaemon.getStatusService();
  statusServ.unregisterStatusAccessor(SERVICE_STATUS_TABLE_NAME);
  statusServ.unregisterStatusAccessor(AUIDS_TABLE_NAME);
  statusServ.unregisterStatusAccessor(AU_STATUS_TABLE_NAME);
  statusServ.unregisterStatusAccessor(PEERS_VOTE_TABLE_NAME);
  statusServ.unregisterStatusAccessor(PEERS_REPAIR_TABLE_NAME);
  logger.debug2("Status accessors unregistered.");
  super.stopService();
}
/** Pick up changes to the per-page row limit and content-link params. */
public void setConfig(Configuration config, Configuration oldConfig,
                      Configuration.Differences changedKeys) {
  defaultNumRows = config.getInt(PARAM_MAX_NODES_TO_DISPLAY,
                                 DEFAULT_MAX_NODES_TO_DISPLAY);
  isContentIsLink = config.getBoolean(PARAM_CONTENT_IS_LINK,
                                      DEFAULT_CONTENT_IS_LINK);
}
/**
 * Status table summarizing every AU: name, content size, disk usage,
 * peers, polls, damage status and last poll/crawl/treewalk times.
 */
static class AuSummary implements StatusAccessor {
  static final String TABLE_TITLE = "Archival Units";
  static final String FOOT_STATUS = "Flags may follow status: C means the AU is complete, D means that the AU is no longer available from the publisher";
  private static final List columnDescriptors = ListUtil.list(
    new ColumnDescriptor("AuName", "Volume", ColumnDescriptor.TYPE_STRING),
    // new ColumnDescriptor("AuNodeCount", "Nodes", ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("AuSize", "Content Size",
                         ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("DiskUsage", "Disk Usage (MB)",
                         ColumnDescriptor.TYPE_FLOAT),
    new ColumnDescriptor("Peers", "Peers", ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("AuPolls", "Polls",
                         ColumnDescriptor.TYPE_STRING),
    new ColumnDescriptor("Damaged", "Status",
                         ColumnDescriptor.TYPE_STRING,
                         FOOT_STATUS),
    new ColumnDescriptor("AuLastPoll", "Last Poll",
                         ColumnDescriptor.TYPE_DATE),
    new ColumnDescriptor("AuLastCrawl", "Last Crawl",
                         ColumnDescriptor.TYPE_DATE),
    new ColumnDescriptor("AuLastTreeWalk", "Last TreeWalk",
                         ColumnDescriptor.TYPE_DATE)
  );
  private static final List sortRules =
    ListUtil.list(new
                  StatusTable.SortRule("AuName",
                                       CatalogueOrderComparator.SINGLETON));
  private LockssDaemon theDaemon;
  AuSummary(LockssDaemon theDaemon) {
    this.theDaemon = theDaemon;
  }
  public String getDisplayName() {
    return TABLE_TITLE;
  }
  public void populateTable(StatusTable table)
      throws StatusService.NoSuchTableException {
    table.setColumnDescriptors(columnDescriptors);
    table.setDefaultSortRules(sortRules);
    table.setRows(getRows(table));
  }
  /** This table lists all AUs; no key is required. */
  public boolean requiresKey() {
    return false;
  }
  /** Build one row per AU, optionally including internal AUs. */
  private List getRows(StatusTable table) {
    PluginManager pluginMgr = theDaemon.getPluginManager();
    boolean includeInternalAus =
      table.getOptions().get(StatusTable.OPTION_INCLUDE_INTERNAL_AUS);
    List rowL = new ArrayList();
    for (Iterator iter = pluginMgr.getAllAus().iterator();
         iter.hasNext(); ) {
      ArchivalUnit au = (ArchivalUnit)iter.next();
      if (!includeInternalAus && pluginMgr.isInternalAu(au)) {
        continue;
      }
      try {
        NodeManager nodeMan = theDaemon.getNodeManager(au);
        CachedUrlSet auCus = au.getAuCachedUrlSet();
        NodeState topNodeState = nodeMan.getNodeState(auCus);
        rowL.add(makeRow(au, nodeMan.getAuState(), topNodeState));
      } catch (Exception e) {
        // Fixed typo in log message ("expection" -> "exception").
        logger.warning("Unexpected exception building row", e);
      }
    }
    return rowL;
  }
  private Map makeRow(ArchivalUnit au, AuState auState,
                      NodeState topNodeState) {
    HashMap rowMap = new HashMap();
    //"AuID"
    rowMap.put("AuName", AuStatus.makeAuRef(au.getName(), au.getAuId()));
    // rowMap.put("AuNodeCount", new Integer(-1));
    rowMap.put("AuSize", new Long(AuUtil.getAuContentSize(au)));
    rowMap.put("DiskUsage", new Double(((double)AuUtil.getAuDiskUsage(au)) / (1024*1024)));
    rowMap.put("AuLastCrawl", new Long(auState.getLastCrawlTime()));
    rowMap.put("Peers", PeerRepair.makeAuRef("peers", au.getAuId()));
    rowMap.put("AuPolls",
               theDaemon.getStatusService().
               getReference(PollerStatus.MANAGER_STATUS_TABLE_NAME,
                            au));
    rowMap.put("AuLastPoll", new Long(auState.getLastTopLevelPollTime()));
    rowMap.put("AuLastTreeWalk", new Long(auState.getLastTreeWalkTime()));
    // Status is "Repairing" if the top node has damage, else "Ok",
    // with C (closed) / D (publisher down) flags per FOOT_STATUS.
    Object stat = topNodeState.hasDamage()
      ? DAMAGE_STATE_DAMAGED : DAMAGE_STATE_OK;
    boolean isPubDown = AuUtil.isPubDown(au);
    boolean isClosed = AuUtil.isClosed(au);
    if (isPubDown || isClosed) {
      List val = ListUtil.list(stat, " (");
      if (isClosed) {
        val.add("C");
      }
      if (isPubDown) {
        val.add("D");
      }
      val.add(")");
      stat = val;
    }
    rowMap.put("Damaged", stat);
    return rowMap;
  }
}
/** Status table mapping each AU's display name to its auid. */
static class AuIds implements StatusAccessor {
  static final String TABLE_TITLE = "AU Ids";
  private static final List columnDescriptors = ListUtil.list(
    new ColumnDescriptor("AuName", "Volume", ColumnDescriptor.TYPE_STRING),
    new ColumnDescriptor("AuId", "AU Id", ColumnDescriptor.TYPE_STRING)
  );
  private static final List sortRules =
    ListUtil.list(new
                  StatusTable.SortRule("AuName",
                                       CatalogueOrderComparator.SINGLETON));
  private LockssDaemon theDaemon;
  AuIds(LockssDaemon theDaemon) {
    this.theDaemon = theDaemon;
  }
  public String getDisplayName() {
    return TABLE_TITLE;
  }
  public void populateTable(StatusTable table)
      throws StatusService.NoSuchTableException {
    table.setColumnDescriptors(columnDescriptors);
    table.setDefaultSortRules(sortRules);
    table.setRows(getRows(table));
  }
  /** Lists all AUs; no key is required. */
  public boolean requiresKey() {
    return false;
  }
  private List getRows(StatusTable table) {
    PluginManager pluginMgr = theDaemon.getPluginManager();
    boolean includeInternalAus =
      table.getOptions().get(StatusTable.OPTION_INCLUDE_INTERNAL_AUS);
    List rowL = new ArrayList();
    for (Iterator iter = pluginMgr.getAllAus().iterator();
         iter.hasNext(); ) {
      ArchivalUnit au = (ArchivalUnit)iter.next();
      if (!includeInternalAus && pluginMgr.isInternalAu(au)) {
        continue;
      }
      try {
        rowL.add(makeRow(au));
      } catch (Exception e) {
        // Fixed typo in log message ("expection" -> "exception").
        logger.warning("Unexpected exception building row", e);
      }
    }
    return rowL;
  }
  private Map makeRow(ArchivalUnit au) {
    HashMap rowMap = new HashMap();
    rowMap.put("AuId", au.getAuId());
    rowMap.put("AuName", au.getName());
    return rowMap;
  }
}
// Displayed values for the damage/status cells, shared by the tables.
static final StatusTable.DisplayedValue DAMAGE_STATE_OK =
  new StatusTable.DisplayedValue("Ok");
static final StatusTable.DisplayedValue DAMAGE_STATE_DAMAGED =
  new StatusTable.DisplayedValue("Repairing");
// Color styling intentionally disabled:
// static {
//   DAMAGE_STATE_OK.setColor("green");
//   DAMAGE_STATE_DAMAGED.setColor("yellow");
/**
 * Base for status tables keyed by a single AU's auid.  Resolves the
 * key to an AU and delegates to the subclass's
 * populateTable(table, au).
 */
abstract static class PerAuTable implements StatusAccessor {
  protected LockssDaemon theDaemon;
  PerAuTable(LockssDaemon theDaemon) {
    this.theDaemon = theDaemon;
  }
  /** Per-AU tables always need an auid key. */
  public boolean requiresKey() {
    return true;
  }
  public String getDisplayName() {
    throw new UnsupportedOperationException("Au table has no generic title");
  }
  public void populateTable(StatusTable table)
      throws StatusService.NoSuchTableException {
    String key = table.getKey();
    try {
      ArchivalUnit au = theDaemon.getPluginManager().getAuFromId(key);
      if (au == null) {
        throw new StatusService.NoSuchTableException("Unknown auid: " + key);
      }
      populateTable(table, au);
    } catch (StatusService.NoSuchTableException e) {
      throw e;
    } catch (Exception e) {
      // Any other failure is reported as a missing table for this auid.
      logger.warning("Error building table", e);
      throw new StatusService.
        NoSuchTableException("Error building table for auid: " + key);
    }
  }
  /** Fill in the table for the given (already resolved) AU. */
  protected abstract void populateTable(StatusTable table, ArchivalUnit au)
      throws StatusService.NoSuchTableException;
}
/**
 * Per-AU status table: summary info about the AU plus one row per node
 * (URL) in the AU, paginated defaultNumRows at a time via the
 * "skiprows"/"numrows" table properties.
 */
static class AuStatus extends PerAuTable {
  private static final List columnDescriptors = ListUtil.list(
    new ColumnDescriptor("NodeName", "Node Url",
                         ColumnDescriptor.TYPE_STRING),
    // new ColumnDescriptor("NodeHasContent", "Content",
    // ColumnDescriptor.TYPE_STRING),
    new ColumnDescriptor("NodeVersion", "Version",
                         ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("NodeContentSize", "Size",
                         ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("NodeTreeSize", "Tree Size",
                         ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("NodeChildCount", "Children",
                         ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("NodeStatus", "Status",
                         ColumnDescriptor.TYPE_STRING)
  );
  private static final List sortRules =
    ListUtil.list(new StatusTable.SortRule("sort", true));
  AuStatus(LockssDaemon theDaemon) {
    super(theDaemon);
  }
  protected void populateTable(StatusTable table, ArchivalUnit au)
      throws StatusService.NoSuchTableException {
    LockssRepository repo = theDaemon.getLockssRepository(au);
    NodeManager nodeMan = theDaemon.getNodeManager(au);
    table.setTitle(getTitle(au.getName()));
    CachedUrlSet auCus = au.getAuCachedUrlSet();
    NodeState topNode = nodeMan.getNodeState(auCus);
    table.setSummaryInfo(getSummaryInfo(au, nodeMan.getAuState(), topNode));
    if (!table.getOptions().get(StatusTable.OPTION_NO_ROWS)) {
      table.setColumnDescriptors(columnDescriptors);
      table.setDefaultSortRules(sortRules);
      table.setRows(getRows(table, au, repo, nodeMan));
    }
  }
  /** Parse an int property from the table request; -1 if absent/bad. */
  int getIntProp(StatusTable table, String name) {
    Properties props = table.getProperties();
    if (props == null) return -1;
    String s = props.getProperty(name);
    if (StringUtil.isNullString(s)) return -1;
    try {
      return Integer.parseInt(s);
    } catch (Exception e) {
      return -1;
    }
  }
  /**
   * Build the node rows for one page, honoring "skiprows"/"numrows"
   * and adding Previous/Next navigation pseudo-rows as needed.
   */
  private List getRows(StatusTable table, ArchivalUnit au,
                       LockssRepository repo, NodeManager nodeMan) {
    int startRow = Math.max(0, getIntProp(table, "skiprows"));
    int numRows = getIntProp(table, "numrows");
    if (numRows <= 0) {
      numRows = defaultNumRows;
    }
    List rowL = new ArrayList();
    Iterator cusIter = au.getAuCachedUrlSet().contentHashIterator();
    int endRow1 = startRow + numRows; // end row + 1
    if (startRow > 0) {
      // add 'previous'
      int start = startRow - defaultNumRows;
      if (start < 0) {
        start = 0;
      }
      rowL.add(makeOtherRowsLink(false, start, au.getAuId()));
    }
    for (int curRow = 0; (curRow < endRow1) && cusIter.hasNext(); curRow++) {
      CachedUrlSetNode cusn = (CachedUrlSetNode)cusIter.next();
      if (curRow < startRow) {
        continue;
      }
      CachedUrlSet cus;
      if (cusn.getType() == CachedUrlSetNode.TYPE_CACHED_URL_SET) {
        cus = (CachedUrlSet)cusn;
      } else {
        CachedUrlSetSpec spec = new RangeCachedUrlSetSpec(cusn.getUrl());
        cus = au.makeCachedUrlSet(spec);
      }
      try {
        Map row = makeRow(au, repo.getNode(cus.getUrl()),
                          nodeMan.getNodeState(cus));
        row.put("sort", new Integer(curRow));
        rowL.add(row);
      } catch (MalformedURLException ignore) { }
    }
    if (cusIter.hasNext()) {
      // add 'next'
      rowL.add(makeOtherRowsLink(true, endRow1, au.getAuId()));
    }
    return rowL;
  }
  /** Build one table row describing a single repository node. */
  private Map makeRow(ArchivalUnit au, RepositoryNode node,
                      NodeState state) {
    String url = node.getNodeUrl();
    boolean hasContent = node.hasContent();
    Object val;
    HashMap rowMap = new HashMap();
    if (hasContent && isContentIsLink) {
      // Render the URL as a link to the cached content servlet.
      Properties args = new Properties();
      args.setProperty("auid", au.getAuId());
      args.setProperty("url", url);
      val = new StatusTable.SrvLink(url,
                                    LockssServlet.SERVLET_DISPLAY_CONTENT,
                                    args);
    } else {
      val = url;
    }
    rowMap.put("NodeName", val);
    String status = null;
    if (node.isDeleted()) {
      status = "Deleted";
    } else if (node.isContentInactive()) {
      status = "Inactive";
    } else if (state.hasDamage()) {
      status = "Damaged";
    } else {
      // status = "Active";
    }
    if (status != null) {
      rowMap.put("NodeStatus", status);
    }
    Object versionObj = DASH;
    Object sizeObj = DASH;
    if (hasContent) {
      versionObj = new OrderedObject(new Long(node.getCurrentVersion()));
      sizeObj = new OrderedObject(new Long(node.getContentSize()));
    }
    rowMap.put("NodeHasContent", (hasContent ? "yes" : "no"));
    rowMap.put("NodeVersion", versionObj);
    rowMap.put("NodeContentSize", sizeObj);
    if (!node.isLeaf()) {
      rowMap.put("NodeChildCount",
                 new OrderedObject(new Long(node.getChildCount())));
      rowMap.put("NodeTreeSize",
                 new OrderedObject(new Long(node.getTreeContentSize(null))));
    } else {
      rowMap.put("NodeChildCount", DASH);
      rowMap.put("NodeTreeSize", DASH);
    }
    return rowMap;
  }
  /** Build a Previous/Next navigation pseudo-row. */
  private Map makeOtherRowsLink(boolean isNext, int startRow, String auKey) {
    HashMap rowMap = new HashMap();
    String label = (isNext ? "Next" : "Previous") + " (" +
      (startRow + 1) + "-" + (startRow + defaultNumRows) + ")";
    StatusTable.Reference link =
      new StatusTable.Reference(label, AU_STATUS_TABLE_NAME, auKey);
    link.setProperty("skiprows", Integer.toString(startRow));
    link.setProperty("numrows", Integer.toString(defaultNumRows));
    rowMap.put("NodeName", link);
    // Sort Previous before all real rows and Next after them.
    rowMap.put("sort", new Integer(isNext ? Integer.MAX_VALUE : -1));
    return rowMap;
  }
  private String getTitle(String key) {
    return "Status of AU: " + key;
  }
  private List getSummaryInfo(ArchivalUnit au, AuState state,
                              NodeState topNode) {
    List summaryList = ListUtil.list(
      new StatusTable.SummaryInfo("Volume" , ColumnDescriptor.TYPE_STRING,
                                  au.getName()),
      // new StatusTable.SummaryInfo("Nodes", ColumnDescriptor.TYPE_INT,
      // new Integer(-1)),
      new StatusTable.SummaryInfo("Content Size",
                                  ColumnDescriptor.TYPE_INT,
                                  new Long(AuUtil.getAuContentSize(au))),
      // Was computed from getAuContentSize(); use getAuDiskUsage() so
      // this agrees with the DiskUsage column in the AU summary table.
      new StatusTable.SummaryInfo("Disk Usage (MB)",
                                  ColumnDescriptor.TYPE_FLOAT,
                                  new Float(AuUtil.getAuDiskUsage(au) /
                                            (float)(1024 * 1024))),
      new StatusTable.SummaryInfo("Status",
                                  ColumnDescriptor.TYPE_STRING,
                                  (topNode.hasDamage()
                                   ? DAMAGE_STATE_DAMAGED
                                   : DAMAGE_STATE_OK)),
      new StatusTable.SummaryInfo("Available From Publisher",
                                  ColumnDescriptor.TYPE_STRING,
                                  (AuUtil.isPubDown(au) ? "No" : "Yes")),
      // new StatusTable.SummaryInfo("Volume Complete",
      // ColumnDescriptor.TYPE_STRING,
      // (AuUtil.isClosed(au) ? "Yes" : "No")),
      new StatusTable.SummaryInfo("Last Crawl Time",
                                  ColumnDescriptor.TYPE_DATE,
                                  new Long(state.getLastCrawlTime())),
      new StatusTable.SummaryInfo("Last Top-level Poll",
                                  ColumnDescriptor.TYPE_DATE,
                                  new Long(state.getLastTopLevelPollTime())),
      new StatusTable.SummaryInfo("Last Treewalk",
                                  ColumnDescriptor.TYPE_DATE,
                                  new Long(state.getLastTreeWalkTime())),
      new StatusTable.SummaryInfo("Current Activity",
                                  ColumnDescriptor.TYPE_STRING,
                                  "-")
    );
    return summaryList;
  }
  // utility method for making a Reference
  public static StatusTable.Reference makeAuRef(Object value,
                                                String key) {
    StatusTable.Reference ref =
      new StatusTable.Reference(value, AU_STATUS_TABLE_NAME, key);
    // ref.setProperty("numrows", Integer.toString(defaultNumRows));
    return ref;
  }
}
/**
 * Base for the per-AU peer agreement tables; provides the shared sort
 * rule, the "Cache" cell (shown bold for the local identity) and the
 * per-peer CacheStats accumulator.
 */
abstract static class PeersAgreement extends PerAuTable {
  protected static final List sortRules =
    ListUtil.list(new StatusTable.SortRule("Cache", true));
  PeersAgreement(LockssDaemon theDaemon) {
    super(theDaemon);
  }
  /** Build the row cells common to both agreement tables. */
  protected Map makeRow(CacheStats stats) {
    Map rowMap = new HashMap();
    PeerIdentity peer = stats.peer;
    Object id = peer.getIdString();
    if (peer.isLocalIdentity()) {
      // Show our own cache's row in bold.
      StatusTable.DisplayedValue val =
        new StatusTable.DisplayedValue(id);
      val.setBold(true);
      id = val;
    }
    rowMap.put("Cache", id);
    return rowMap;
  }
  /** Per-peer poll and agreement counters. */
  static class CacheStats {
    PeerIdentity peer;
    int totalPolls = 0;
    int agreePolls = 0;
    Vote lastAgree;
    long lastAgreeTime = 0;
    Vote lastDisagree;
    long lastDisagreeTime = 0;
    CacheStats(PeerIdentity peer) {
      this.peer = peer;
    }
    // True if the most recent recorded event was an agreement
    // (equal timestamps count as agreement).
    boolean isLastAgree() {
      return (lastAgreeTime != 0 &&
              (lastDisagreeTime == 0 || lastAgreeTime >= lastDisagreeTime));
    }
  }
}
/**
 * Per-AU table of every peer that has voted in the AU's polls, with
 * poll and agreement counts derived from the poll histories of the
 * AU's top-level node.
 */
static class PeerVoteSummary extends PeersAgreement {
  private static final List columnDescriptors = ListUtil.list(
    new ColumnDescriptor("Cache", "Cache",
                         ColumnDescriptor.TYPE_STRING),
    new ColumnDescriptor("Last", "Last",
                         ColumnDescriptor.TYPE_STRING),
    new ColumnDescriptor("Polls", "Polls",
                         ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("Agree", "Agree",
                         ColumnDescriptor.TYPE_INT),
    new ColumnDescriptor("LastAgree", "Last Agree",
                         ColumnDescriptor.TYPE_DATE),
    new ColumnDescriptor("LastDisagree", "Last Disagree",
                         ColumnDescriptor.TYPE_DATE)
  );
  PeerVoteSummary(LockssDaemon theDaemon) {
    super(theDaemon);
  }
  protected String getTitle(ArchivalUnit au) {
    return "All caches voting on AU: " + au.getName();
  }
  protected void populateTable(StatusTable table, ArchivalUnit au)
      throws StatusService.NoSuchTableException {
    NodeManager nodeMan = theDaemon.getNodeManager(au);
    table.setTitle(getTitle(au));
    int totalPeers = 0;
    int totalAgreement = 0;
    if (!table.getOptions().get(StatusTable.OPTION_NO_ROWS)) {
      table.setColumnDescriptors(columnDescriptors);
      table.setDefaultSortRules(sortRules);
      Map statsMap = buildCacheStats(au, nodeMan);
      List rowL = new ArrayList();
      for (Iterator iter = statsMap.entrySet().iterator(); iter.hasNext();) {
        Map.Entry entry = (Map.Entry)iter.next();
        PeerIdentity peer = (PeerIdentity)entry.getKey();
        CacheStats stats = (CacheStats)entry.getValue();
        // The local identity is listed but excluded from the totals.
        if (! peer.isLocalIdentity()) {
          totalPeers++;
          if (stats.isLastAgree()) {
            totalAgreement++;
          }
        }
        Map row = makeRow(stats);
        rowL.add(row);
      }
      table.setRows(rowL);
    }
    table.setSummaryInfo(getSummaryInfo(au, totalPeers, totalAgreement));
  }
  /**
   * Accumulate per-peer CacheStats from the votes in every poll
   * history of the AU's top-level node.
   */
  public Map buildCacheStats(ArchivalUnit au, NodeManager nodeMan) {
    Map statsMap = new HashMap();
    NodeState node = nodeMan.getNodeState(au.getAuCachedUrlSet());
    for (Iterator history_it = node.getPollHistories();
         history_it.hasNext(); ) {
      PollHistory history = (PollHistory)history_it.next();
      long histTime = history.getStartTime();
      for (Iterator votes_it = history.getVotes(); votes_it.hasNext(); ) {
        Vote vote = (Vote)votes_it.next();
        PeerIdentity peer = vote.getVoterIdentity();
        CacheStats stats = (CacheStats)statsMap.get(peer);
        if (stats == null) {
          stats = new CacheStats(peer);
          statsMap.put(peer, stats);
        }
        stats.totalPolls++;
        if (vote.isAgreeVote()) {
          stats.agreePolls++;
          // Track the most recent agreeing vote, by poll start time.
          if (stats.lastAgree == null ||
              histTime > stats.lastAgreeTime) {
            stats.lastAgree = vote;
            stats.lastAgreeTime = histTime;
          }
        } else {
          // Likewise for the most recent disagreeing vote.
          if (stats.lastDisagree == null ||
              histTime > stats.lastDisagreeTime) {
            stats.lastDisagree = vote;
            stats.lastDisagreeTime = histTime;
          }
        }
      }
    }
    return statsMap;
  }
  protected Map makeRow(CacheStats stats) {
    Map rowMap = super.makeRow(stats);
    rowMap.put("Last",
               stats.isLastAgree() ? "Agree" : "Disagree");
    rowMap.put("Polls", new Long(stats.totalPolls));
    rowMap.put("Agree", new Long(stats.agreePolls));
    rowMap.put("LastAgree", new Long(stats.lastAgreeTime));
    rowMap.put("LastDisagree", new Long(stats.lastDisagreeTime));
    return rowMap;
  }
  protected List getSummaryInfo(ArchivalUnit au,
                                int totalPeers, int totalAgreement) {
    List summaryList = ListUtil.list(
      new StatusTable.SummaryInfo("Peers voting on AU",
                                  ColumnDescriptor.TYPE_INT,
                                  new Integer(totalPeers)),
      new StatusTable.SummaryInfo("Agreeing peers",
                                  ColumnDescriptor.TYPE_INT,
                                  new Integer(totalAgreement))
    );
    return summaryList;
  }
  // utility method for making a Reference
  public static StatusTable.Reference makeAuRef(Object value,
                                                String key) {
    return new StatusTable.Reference(value, PEERS_VOTE_TABLE_NAME,
                                     key);
  }
}
/**
 * Per-AU table of repair candidates: peers whose identity-agreement
 * records show they have (or had) a correct copy of the AU.
 */
static class PeerRepair extends PeersAgreement {
  private static final List columnDescriptors = ListUtil.list(
    new ColumnDescriptor("Cache", "Cache",
                         ColumnDescriptor.TYPE_STRING),
    new ColumnDescriptor("Last", "Complete Consensus",
                         ColumnDescriptor.TYPE_STRING),
    new ColumnDescriptor("LastAgree",
                         "Last Complete Consensus",
                         ColumnDescriptor.TYPE_DATE),
    new ColumnDescriptor("LastDisagree",
                         "Last Partial Disagreement",
                         ColumnDescriptor.TYPE_DATE)
  );
  PeerRepair(LockssDaemon theDaemon) {
    super(theDaemon);
  }
  protected String getTitle(ArchivalUnit au) {
    return "Repair candidates for AU: " + au.getName();
  }
  private static final String FOOT_TITLE =
    "These caches have proven to us that they have (or had) a correct \n" +
    "copy of this AU. We will fetch repairs from them if necessary, \n" +
    "and they may fetch repairs from us.";
  protected void populateTable(StatusTable table, ArchivalUnit au)
      throws StatusService.NoSuchTableException {
    IdentityManager idMgr = theDaemon.getIdentityManager();
    table.setTitle(getTitle(au));
    table.setTitleFootnote(FOOT_TITLE);
    int totalPeers = 0;
    if (!table.getOptions().get(StatusTable.OPTION_NO_ROWS)) {
      table.setColumnDescriptors(columnDescriptors);
      table.setDefaultSortRules(sortRules);
      Map statsMap = buildCacheStats(au, idMgr);
      List rowL = new ArrayList();
      for (Iterator iter = statsMap.entrySet().iterator(); iter.hasNext();) {
        Map.Entry entry = (Map.Entry)iter.next();
        PeerIdentity peer = (PeerIdentity)entry.getKey();
        CacheStats stats = (CacheStats)entry.getValue();
        // The local identity is listed but not counted.
        if (! peer.isLocalIdentity()) {
          totalPeers++;
        }
        Map row = makeRow(stats);
        rowL.add(row);
      }
      table.setRows(rowL);
    }
    table.setSummaryInfo(getSummaryInfo(au, totalPeers));
  }
  /**
   * Build CacheStats for each peer with a recorded agreement; peers
   * that have never agreed are omitted.
   */
  public Map buildCacheStats(ArchivalUnit au, IdentityManager idMgr) {
    Map statsMap = new HashMap();
    for (Iterator iter = idMgr.getIdentityAgreements(au).iterator();
         iter.hasNext(); ) {
      IdentityManager.IdentityAgreement ida =
        (IdentityManager.IdentityAgreement)iter.next();
      try {
        PeerIdentity pid = idMgr.stringToPeerIdentity(ida.getId());
        if (ida.getLastAgree() > 0) { // only add those that have agreed
          CacheStats stats = new CacheStats(pid);
          statsMap.put(pid, stats);
          stats.lastAgreeTime = ida.getLastAgree();
          stats.lastDisagreeTime = ida.getLastDisagree();
        }
      } catch (IdentityManager.MalformedIdentityKeyException e) {
        logger.warning("Malformed id key in IdentityAgreement", e);
        continue;
      }
    }
    return statsMap;
  }
  protected Map makeRow(CacheStats stats) {
    Map rowMap = super.makeRow(stats);
    rowMap.put("Last", stats.isLastAgree() ? "Yes" : "No");
    rowMap.put("LastAgree", new Long(stats.lastAgreeTime));
    rowMap.put("LastDisagree", new Long(stats.lastDisagreeTime));
    return rowMap;
  }
  protected List getSummaryInfo(ArchivalUnit au,
                                int totalPeers) {
    List summaryList = ListUtil.list(
      new StatusTable.SummaryInfo("Peers holding AU",
                                  ColumnDescriptor.TYPE_INT,
                                  new Integer(totalPeers)),
      new StatusTable.SummaryInfo("Peers",
                                  ColumnDescriptor.TYPE_STRING,
                                  PeerVoteSummary.makeAuRef("Voting on AU", au.getAuId()))
    );
    return summaryList;
  }
  // utility method for making a Reference
  public static StatusTable.Reference makeAuRef(Object value,
                                                String key) {
    return new StatusTable.Reference(value, PEERS_REPAIR_TABLE_NAME,
                                     key);
  }
}
}
|
package au.id.chenery.mapyrus.dataset;
import java.awt.geom.Rectangle2D;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.LineNumberReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.Hashtable;
import java.util.StringTokenizer;
import java.util.ArrayList;
import au.id.chenery.mapyrus.*;
/**
* Implements reading of geographic datasets from a delimited text file
* with one geometry plus its attributes per line.
* Suitable for reading comma separated file or other simplistic file formats.
*/
public class TextfileDataset implements GeographicDataset
{
/*
 * File we are reading from.
 * Process handle to external process we are reading from.
 */
private LineNumberReader mReader;
private String mFilename;
private Process mProcess;
/*
 * Names of fields and their types, read from header at start of file.
 */
private String mFieldNames[];
private int mFieldTypes[];
/*
 * Indices of fields making up geometry. For example, Lon, Lat values
 * or X1, Y1, X2, Y2 values.
 */
private int mGeometryFieldIndexes[];
/*
 * Field separators. Normally a comma or keyword 'whitespace' (meaning anything
 * blank).
 */
private String mDelimiters;
/*
 * String that denotes comment lines in text file. These lines
 * are ignored.
 */
private String mComment;
/*
 * Area being queried.
 */
private Rectangle2D.Double mQueryExtents;
/*
 * Static field type lookup table for easy lookup.
 * All accepted type-name synonyms below map to Argument.NUMERIC.
 */
private static Hashtable mFieldTypeLookup;
static
{
  mFieldTypeLookup = new Hashtable();
  mFieldTypeLookup.put("number", new Integer(Argument.NUMERIC));
  mFieldTypeLookup.put("numeric", new Integer(Argument.NUMERIC));
  mFieldTypeLookup.put("int", new Integer(Argument.NUMERIC));
  mFieldTypeLookup.put("integer", new Integer(Argument.NUMERIC));
  mFieldTypeLookup.put("real", new Integer(Argument.NUMERIC));
  mFieldTypeLookup.put("double", new Integer(Argument.NUMERIC));
  mFieldTypeLookup.put("float", new Integer(Argument.NUMERIC));
}
/*
* Read next line from file, skipping comment lines.
*/
private String readLine() throws IOException
{
String s;
do
{
s = mReader.readLine();
}
while (s != null && s.startsWith(mComment));
return(s);
}
/**
* Create string tokenizer to split line using delimiters set for this file.
* @param str is string to be split into tokens.
* @param delimiters are field delimiters, if null then whitespace is used.
*/
private StringTokenizer createStringTokenizer(String str, String delimiters)
{
StringTokenizer retval;
if (delimiters == null)
retval = new StringTokenizer(str);
else
retval = new StringTokenizer(str, delimiters);
return(retval);
}
/**
* Open text file containing geographic data for querying.
* @param filename name of text file to open.
* @param extras options specific to text file datasets, given as var=value pairs.
* @param geometryFieldNames comma separated list of names of fields containing geometry.
*/
public TextfileDataset(String filename, String extras, String []geometryFieldNames)
throws FileNotFoundException, IOException, MapyrusException
{
String header, fieldType;
StringTokenizer st;
ArrayList list;
int i, j;
Integer fType;
String token;
boolean foundGeometryField;
String fieldNames = null, fieldTypes = null;
String fieldNameDelimiters = null, fieldTypeDelimiters = null;
String nextLine;
BufferedReader bufferedReader;
/*
* Check if we should start a program and read its output instead
* of just reading from a file.
*/
if (filename.endsWith("|"))
{
String command = filename.substring(0, filename.length() - 1).trim();
mProcess = Runtime.getRuntime().exec(command);
bufferedReader = new BufferedReader(new InputStreamReader(mProcess.getInputStream()));
}
else
{
bufferedReader = new BufferedReader(new FileReader(filename));
}
mReader = new LineNumberReader(bufferedReader);
mFilename = filename;
/*
* Set default options. Then see if user wants to override any of them.
*/
mDelimiters = null;
mComment = "
st = new StringTokenizer(extras);
while (st.hasMoreTokens())
{
token = st.nextToken();
if (token.startsWith("comment="))
mComment = token.substring(8);
else if (token.startsWith("delimiters="))
mDelimiters = token.substring(11);
else if (token.startsWith("fieldnames="))
fieldNames = token.substring(11);
else if (token.startsWith("fieldtypes="))
fieldTypes = token.substring(11);
}
/*
* If field names and types not given then read them from first two lines
* of file. Then skip more lines until we get to start of data.
*/
if (fieldNames != null)
{
fieldNameDelimiters = ",";
}
else
{
fieldNames = readLine();
fieldNameDelimiters = mDelimiters;
if (fieldNames == null)
throw new MapyrusException("Unexpected end of file in '" + filename + "'");
}
if (fieldTypes != null)
{
fieldTypeDelimiters = ",";
}
else
{
fieldTypes = readLine();
fieldTypeDelimiters = mDelimiters;
if (fieldNames == null)
throw new MapyrusException("Unexpected end of file in '" + filename + "'");
}
st = createStringTokenizer(fieldNames, fieldNameDelimiters);
list = new ArrayList();
while (st.hasMoreTokens())
{
list.add((String)st.nextToken());
}
mFieldNames = new String[list.size()];
for (i = 0; i < mFieldNames.length; i++)
mFieldNames[i] = (String)list.get(i);
mFieldTypes = new int[mFieldNames.length];
st = createStringTokenizer(fieldTypes, fieldTypeDelimiters);
i = 0;
while (st.hasMoreTokens() && i < mFieldTypes.length)
{
fieldType = st.nextToken();
fieldType = fieldType.toLowerCase();
fType = (Integer)mFieldTypeLookup.get(fieldType);
if (fType == null)
mFieldTypes[i] = Argument.STRING;
else
mFieldTypes[i] = fType.intValue();
i++;
}
/*
* Make sure name and type given for each field.
*/
if (i != mFieldTypes.length || st.hasMoreTokens())
{
throw new MapyrusException("Different number of field " +
"names and field types in '" + filename + "'");
}
/*
* Caller must give us field names containing geometry.
* We cannot possibly work it out for ourselves here.
*/
if (geometryFieldNames.length < 2)
{
throw new MapyrusException("Names of fields in dataset containing geometry required");
}
/*
* Find indexes of fields caller says are the geometry for each row.
*/
list.clear();
for (i = 0; i < geometryFieldNames.length; i++)
{
foundGeometryField = false;
for (j = 0; j < mFieldNames.length && foundGeometryField == false; j++)
{
if (geometryFieldNames[i].equalsIgnoreCase(mFieldNames[j]))
{
if (mFieldTypes[j] == Argument.STRING)
{
throw new MapyrusException("Field '" + mFieldNames[j] +
"' is wrong type for geometry in file '" +
filename + "'");
}
list.add(new Integer(j));
foundGeometryField = true;
}
}
if (foundGeometryField == false)
{
throw new MapyrusException("Geometry field '" +
mFieldNames[j] + "' not found in file '" + filename + "'");
}
}
/*
* Save array of fields combined to make geometry for each row.
*/
mGeometryFieldIndexes = new int[list.size()];
for (i = 0; i < list.size(); i++)
mGeometryFieldIndexes[i] = ((Integer)(list.get(i))).intValue();
}
/**
* Returns projection of dataset, which is not defined for a text file.
* @return string "undef".
*/
public String getProjection()
{
return("undef");
}
/**
* @see net.sourceforge.mapyrus.GeographicDataset#getMetadata()
*/
public Hashtable getMetadata()
{
return(new Hashtable());
}
/**
* Return names of fields in this text file.
* @return fieldnames.
*/
public String[] getFieldNames()
{
return(mFieldNames);
}
/**
* Return types of fields in this text file.
* @return field types.
*/
public int[] getFieldTypes()
{
return(mFieldTypes);
}
/**
* Return indexes of geometry fields in list of field names.
* @return list of field indexes.
*/
public int[] getGeometryFieldIndexes()
{
return(mGeometryFieldIndexes);
}
/**
* Return extents in text file. These are not known until the whole
* file is scanned.
* @return degree values covering the whole world
*/
public Rectangle2D.Double getWorlds()
{
return new Rectangle2D.Double(-180.0, -90.0, 180.0, 90.0);
}
/**
* Begins a query on a text file dataset.
* @param extents the area of interest for the query. Geometry outside
* these extents will be skipped.
* @param resolution is hint for minimum distance between coordinate values.
*/
public void query(Rectangle2D.Double extents, double resolution)
throws MapyrusException
{
mQueryExtents = extents;
}
/**
* Read next row from file and split it into fields. Build fields into Row structure.
*/
private boolean readNextRow(Row row) throws MapyrusException
{
int i, geometryFieldIndex;
StringTokenizer st;
String message = null;
String fieldValue, nextLine;
Argument field;
/*
* Need next line from file.
*/
try
{
nextLine = readLine();
}
catch (IOException e)
{
throw new MapyrusException("Error reading file '" + mFilename +
"': " +e.getMessage());
}
/*
* Return EOF status if no more lines available in file.
*/
if (nextLine == null)
return(false);
/*
* Split line into fields and build a row to be returned.
*/
row.clear();
i = geometryFieldIndex = 0;
st = createStringTokenizer(nextLine, mDelimiters);
while (st.hasMoreTokens() && i < mFieldNames.length)
{
fieldValue = st.nextToken();
if (mFieldTypes[i] == Argument.NUMERIC)
{
try
{
field = new Argument(Double.parseDouble(fieldValue));
}
catch (NumberFormatException e)
{
throw new MapyrusException("Invalid numeric value '" +
fieldValue + "' in file " + mFilename + " line " +
mReader.getLineNumber());
}
}
else
{
field = new Argument(Argument.STRING, fieldValue);
}
row.add(field);
i++;
}
/*
* Make sure we read the correct number of fields.
*/
if (i != mFieldNames.length)
{
throw new MapyrusException("Missing fields in file " +
mFilename + " line " + mReader.getLineNumber());
}
return(true);
}
/**
* Gets next row from file that contains geometry crossing the query extents.
* @return next row read, or null if no row found.
*/
public Row fetch() throws MapyrusException
{
boolean retval;
boolean rowOutside;
int i;
Argument x, y;
boolean left, right, bottom, top, inX, inY;
boolean insideExtents = false;
int outcode;
boolean nextRowAvailable;
Row row = new Row();
do
{
nextRowAvailable = readNextRow(row);
if (nextRowAvailable)
{
/*
* Walk through points in geometry and see whether
* they cross the area we are querying.
*/
left = right = bottom = top = inX = inY = false;
for (i = 0; i < mGeometryFieldIndexes.length; i += 2)
{
x = (Argument)(row.get(i));
y = (Argument)(row.get(i + 1));
outcode = mQueryExtents.outcode(x.getNumericValue(),
y.getNumericValue());
if ((outcode & Rectangle2D.OUT_LEFT) != 0)
left = true;
else if ((outcode & Rectangle2D.OUT_RIGHT) != 0)
right = true;
else
inX = true;
if ((outcode & Rectangle2D.OUT_BOTTOM) != 0)
bottom = true;
else if ((outcode & Rectangle2D.OUT_TOP) != 0)
top = true;
else
inY = true;
}
/*
* Geometry inside query extents if there is a point inside extents
* (or a point both sides of the extents) in both X and Y axes.
*/
insideExtents = (inX || (left && right)) && (inY || (bottom && top));
}
}
while (nextRowAvailable && (!insideExtents));
if (insideExtents)
return(row);
else
{
/*
* We've read all of external program's output, now wait for
* it to terminate.
*/
if (mProcess != null)
{
try
{
mProcess.waitFor();
}
catch (InterruptedException e)
{
throw new MapyrusException(e.getMessage());
}
}
return(null);
}
}
}
|
package org.ohmage.service;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import org.ohmage.domain.MobilityPoint;
import org.ohmage.domain.MobilityPoint.LocationStatus;
import org.ohmage.domain.MobilityPoint.Mode;
import org.ohmage.domain.MobilityPoint.SensorData;
import org.ohmage.domain.MobilityPoint.SensorData.WifiData;
import org.ohmage.exception.DataAccessException;
import org.ohmage.exception.ServiceException;
import org.ohmage.query.IUserMobilityQueries;
import edu.ucla.cens.mobilityclassifier.Classification;
import edu.ucla.cens.mobilityclassifier.MobilityClassifier;
/**
* This class is responsible for all services pertaining to Mobility points.
*
* @author John Jenkins
*/
public final class MobilityServices {
	private static MobilityServices instance;

	private IUserMobilityQueries userMobilityQueries;

	/**
	 * Private singleton constructor; registers this object as the single
	 * instance of the class.
	 *
	 * @param iUserMobilityQueries the query layer for Mobility points.
	 *
	 * @throws IllegalStateException if an instance already exists.
	 *
	 * @throws IllegalArgumentException if the query object is null.
	 */
	private MobilityServices(IUserMobilityQueries iUserMobilityQueries) {
		if(instance != null) {
			throw new IllegalStateException("An instance of this class already exists.");
		}

		if(iUserMobilityQueries == null) {
			throw new IllegalArgumentException("An instance of IUserMobilityQueries is required.");
		}

		userMobilityQueries = iUserMobilityQueries;
		instance = this;
	}

	/**
	 * @return Returns the singleton instance of this class.
	 */
	public static MobilityServices instance() {
		return instance;
	}

	/**
	 * Adds the Mobility points to the database.
	 *
	 * @param username The username of the user that created the points.
	 *
	 * @param client The client value of the uploader.
	 *
	 * @param mobilityPoints A list of Mobility points to be added to the
	 * 						 database. A null list is treated as empty.
	 *
	 * @throws ServiceException Thrown if there is an error.
	 */
	public void createMobilityPoint(final String username,
			final String client, final List<MobilityPoint> mobilityPoints)
			throws ServiceException {

		if(username == null) {
			throw new ServiceException("The username cannot be null.");
		}
		else if(client == null) {
			throw new ServiceException("The client cannot be null.");
		}

		// Robustness fix: a null list previously caused a
		// NullPointerException in the loop below; treat it as "nothing to
		// store", mirroring classifyData()'s null handling.
		if(mobilityPoints == null) {
			return;
		}

		try {
			for(MobilityPoint mobilityPoint : mobilityPoints) {
				userMobilityQueries.createMobilityPoint(username, client, mobilityPoint);
			}
		}
		catch(DataAccessException e) {
			throw new ServiceException(e);
		}
	}

	/**
	 * Runs the classifier against all of the Mobility points in the list and
	 * stores the classifier's results in each point.
	 *
	 * @param mobilityPoints The Mobility points that are to be classified by
	 * 						 the server. A null list is a no-op.
	 *
	 * @throws ServiceException Thrown if there is an error with the
	 * 							classification service.
	 */
	public void classifyData(final List<MobilityPoint> mobilityPoints) throws ServiceException {
		// If the list is empty, just exit.
		if(mobilityPoints == null) {
			return;
		}

		// Create a new classifier.
		MobilityClassifier classifier = new MobilityClassifier();

		// Create place holders for the previous data.
		String previousSensorData = null;
		String previousWifiMode = null;

		// For each of the Mobility points,
		for(MobilityPoint mobilityPoint : mobilityPoints) {
			// If the data point is of type error, don't attempt to classify
			if(mobilityPoint.getMode().equals(Mode.ERROR)) {
				continue;
			}

			// If the SubType is sensor data,
			if(MobilityPoint.SubType.SENSOR_DATA.equals(mobilityPoint.getSubType())) {
				SensorData currSensorData = mobilityPoint.getSensorData();
				WifiData wifiData = currSensorData.getWifiData();

				String wifiDataString;
				if(wifiData == null) {
					wifiDataString = null;
				}
				else {
					wifiDataString = wifiData.toJson().toString();
				}

				// Classify the data.
				Classification classification =
					classifier.classify(
							mobilityPoint.getSamples(),
							currSensorData.getSpeed(),
							wifiDataString,
							previousSensorData,
							previousWifiMode);

				// Update the place holders for the previous data.
				previousSensorData = wifiDataString;
				previousWifiMode = classification.getWifiMode();

				// If the classification generated some results, pull them out
				// and store them in the Mobility point.
				if(classification.hasFeatures()) {
					try {
						// NOTE(review): getVariance() is passed for two
						// consecutive parameters; verify against the
						// setClassifierData() signature that this
						// duplication is intentional.
						mobilityPoint.setClassifierData(
								classification.getFft(),
								classification.getVariance(),
								classification.getVariance(),
								classification.getAverage(),
								MobilityPoint.Mode.valueOf(classification.getMode().toUpperCase()));
					}
					catch(IllegalArgumentException e) {
						throw new ServiceException(
								"There was a problem reading the classification's information.",
								e);
					}
				}
				// If the features don't exist, then create the classifier data
				// with only the mode.
				else {
					try {
						mobilityPoint.setClassifierModeOnly(MobilityPoint.Mode.valueOf(classification.getMode().toUpperCase()));
					}
					catch(IllegalArgumentException e) {
						throw new ServiceException(
								"There was a problem reading the classification's mode.",
								e);
					}
				}
			}
		}
	}

	/**
	 * Retrieves the information about all of the Mobility points that satisfy
	 * the parameters. The username is required as that is how Mobility points
	 * are referenced; however, all other parameters are optional and limit the
	 * results based on their value.<br />
	 * <br />
	 * For example, if only a username is given, the result is all of the
	 * Mobility points for that user. If the username and start date are given,
	 * then all of the Mobility points made by that user after that date are
	 * returned. If the username, start date, and end date are all given, the
	 * result is the list of Mobility points made by that user on or after the
	 * start date and on or before the end date.
	 *
	 * @param username The username of the user whose points are being queried.
	 * 				   Required.
	 *
	 * @param client A client value that uploaded the point. Optional.
	 *
	 * @param startDate A date to which all returned points must be on or
	 * 					after. Optional.
	 *
	 * @param endDate A date to which all returned points must be on or before.
	 * 				  Optional.
	 *
	 * @param privacyState A privacy state to limit the results to only those
	 * 					   with this privacy state. Optional.
	 *
	 * @param locationStatus A location status to limit the results to only
	 * 						 those with this location status. Optional.
	 *
	 * @param mode A mode to limit the results to only those with this mode.
	 * 			   Optional.
	 *
	 * @return A list of MobilityInformation objects where each object
	 * 		   represents a single Mobility point that satisfies the
	 * 		   parameters.
	 *
	 * @throws ServiceException Thrown if there is an error.
	 */
	public List<MobilityPoint> retrieveMobilityData(
			final String username, final String client,
			final Date startDate, final Date endDate,
			final MobilityPoint.PrivacyState privacyState,
			final LocationStatus locationStatus, final Mode mode)
			throws ServiceException {

		try {
			// Create the IDs list and set it to null. Once we find a non-null
			// parameter, we will set the list to that parameter's value.
			List<String> mobilityIds = null;

			// If both start and end date are non-null, get the IDs from their
			// intersection; otherwise, try and get the IDs from the one that
			// isn't null if either are non-null.
			if((startDate != null) && (endDate != null)) {
				mobilityIds = userMobilityQueries.getIdsCreatedBetweenDates(username, startDate, endDate);
			}
			else {
				if(startDate != null) {
					mobilityIds = userMobilityQueries.getIdsCreatedAfterDate(username, startDate);
				}
				else if(endDate != null) {
					mobilityIds = userMobilityQueries.getIdsCreatedBeforeDate(username, endDate);
				}
			}

			// For each remaining filter, intersect its ID set with what we
			// have so far (or seed the set if this is the first filter).
			if(client != null) {
				if(mobilityIds == null) {
					mobilityIds = userMobilityQueries.getIdsForClient(username, client);
				}
				else {
					mobilityIds.retainAll(userMobilityQueries.getIdsForClient(username, client));
				}
			}

			if(privacyState != null) {
				if(mobilityIds == null) {
					mobilityIds = userMobilityQueries.getIdsWithPrivacyState(username, privacyState);
				}
				else {
					mobilityIds.retainAll(userMobilityQueries.getIdsWithPrivacyState(username, privacyState));
				}
			}

			if(locationStatus != null) {
				if(mobilityIds == null) {
					mobilityIds = userMobilityQueries.getIdsWithLocationStatus(username, locationStatus);
				}
				else {
					mobilityIds.retainAll(userMobilityQueries.getIdsWithLocationStatus(username, locationStatus));
				}
			}

			if(mode != null) {
				if(mobilityIds == null) {
					mobilityIds = userMobilityQueries.getIdsWithMode(username, mode);
				}
				else {
					mobilityIds.retainAll(userMobilityQueries.getIdsWithMode(username, mode));
				}
			}

			if((mobilityIds == null) || mobilityIds.isEmpty()) {
				return Collections.emptyList();
			}
			else {
				return userMobilityQueries.getMobilityInformationFromIds(mobilityIds);
			}
		}
		catch(DataAccessException e) {
			throw new ServiceException(e);
		}
	}
}
|
package org.openid4java.message;
import org.openid4java.association.Association;
import org.openid4java.server.RealmVerifier;
import org.openid4java.OpenIDException;
import java.util.List;
import java.util.Arrays;
import java.util.Iterator;
import java.net.URL;
import java.net.MalformedURLException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
/**
* @author Marius Scurtescu, Johnny Bufu
*/
public class AuthRequest extends Message
{
    private static Log _log = LogFactory.getLog(AuthRequest.class);
    private static final boolean DEBUG = _log.isDebugEnabled();

    public static final String MODE_SETUP = "checkid_setup";
    public static final String MODE_IMMEDIATE = "checkid_immediate";
    public static final String SELECT_ID =
        "http://specs.openid.net/auth/2.0/identifier_select";

    protected final static List requiredFields = Arrays.asList( new String[] {
        "openid.mode"
    });

    protected final static List optionalFields = Arrays.asList( new String[] {
        "openid.ns",
        "openid.claimed_id",
        "openid.identity",
        "openid.assoc_handle",
        "openid.realm",
        "openid.trust_root",
        "openid.return_to"
    });

    // Used by validate() to check the realm against the return_to URL.
    private RealmVerifier _realmVerifier;

    /**
     * Builds an authentication request where the realm defaults to the
     * return_to URL.
     */
    protected AuthRequest(String claimedId, String delegate, boolean compatibility,
                          String returnToUrl, String handle, RealmVerifier verifier)
    {
        this(claimedId, delegate, compatibility,
                returnToUrl, handle, returnToUrl, verifier);
    }

    /**
     * Builds an authentication request.
     *
     * @param compatibility true for OpenID 1.x messages (no openid.ns,
     *                      no claimed_id parameter).
     */
    protected AuthRequest(String claimedId, String delegate, boolean compatibility,
                          String returnToUrl, String handle, String realm,
                          RealmVerifier verifier)
    {
        if (! compatibility)
        {
            set("openid.ns", OPENID2_NS);
            setClaimed(claimedId);
        }

        setIdentity(delegate);

        if ( returnToUrl != null ) setReturnTo(returnToUrl);
        if ( realm != null ) setRealm(realm);

        // A failed-association sentinel handle is not sent on the wire.
        if (! Association.FAILED_ASSOC_HANDLE.equals(handle)) setHandle(handle);

        setImmediate(false);

        _realmVerifier = verifier;
    }

    protected AuthRequest(ParameterList params)
    {
        super(params);
    }

    public static AuthRequest createAuthRequest(String claimedId, String delegate,
                                                boolean compatibility, String returnToUrl,
                                                String handle, RealmVerifier verifier)
            throws MessageException
    {
        return createAuthRequest(claimedId, delegate, compatibility,
                returnToUrl, handle, returnToUrl, verifier);
    }

    /**
     * Factory that builds and validates an authentication request.
     *
     * @throws MessageException if the resulting message is invalid.
     */
    public static AuthRequest createAuthRequest(String claimedId, String delegate,
                                                boolean compatibility, String returnToUrl,
                                                String handle, String realm, RealmVerifier verifier)
            throws MessageException
    {
        AuthRequest req = new AuthRequest(claimedId, delegate, compatibility,
                returnToUrl, handle, realm, verifier);
        req.validate();

        if (DEBUG) _log.debug("Created auth request:\n" + req.keyValueFormEncoding());

        return req;
    }

    /**
     * Factory that wraps and validates an already-parsed parameter list.
     *
     * @throws MessageException if the parameters are invalid.
     */
    public static AuthRequest createAuthRequest(ParameterList params,
                                                RealmVerifier realmVerifier)
            throws MessageException
    {
        AuthRequest req = new AuthRequest(params);

        req.setRealmVerifier(realmVerifier);
        req.validate();

        if (DEBUG) _log.debug("Created auth request:\n" + req.keyValueFormEncoding());

        return req;
    }

    public List getRequiredFields()
    {
        return requiredFields;
    }

    public void setOPEndpoint(URL opEndpoint)
    {
        if (opEndpoint != null)
            _destinationUrl = opEndpoint.toString();
    }

    public String getOPEndpoint()
    {
        return _destinationUrl;
    }

    public void setImmediate(boolean immediate)
    {
        set("openid.mode", immediate ? MODE_IMMEDIATE : MODE_SETUP);

        if (DEBUG && immediate)
            _log.debug("Setting checkid_immediate auth request.");
    }

    public boolean isImmediate()
    {
        return MODE_IMMEDIATE.equals(getParameterValue("openid.mode"));
    }

    public boolean isVersion2()
    {
        return hasParameter("openid.ns") &&
                OPENID2_NS.equals(getParameterValue("openid.ns"));
    }

    public void setIdentity(String id)
    {
        set("openid.identity", id);
    }

    public String getIdentity()
    {
        return getParameterValue("openid.identity");
    }

    public void setClaimed(String claimed)
    {
        set("openid.claimed_id", claimed);
    }

    public String getClaimed()
    {
        return getParameterValue("openid.claimed_id");
    }

    public void setHandle(String handle)
    {
        set("openid.assoc_handle", handle);
    }

    public String getHandle()
    {
        return getParameterValue("openid.assoc_handle");
    }

    public void setReturnTo(String returnTo)
    {
        set("openid.return_to", returnTo);
    }

    public String getReturnTo()
    {
        return getParameterValue("openid.return_to");
    }

    // OpenID 1.x calls the realm parameter "trust_root".
    public void setRealm(String realm)
    {
        set(isVersion2() ? "openid.realm" : "openid.trust_root", realm);
    }

    public String getRealm()
    {
        if (isVersion2())
            return getParameterValue("openid.realm");
        else
            return getParameterValue("openid.trust_root");
    }

    /**
     * Gets the RealmVerifier used to verify realms against return_to URLs.
     */
    public RealmVerifier getRealmVerifier()
    {
        return _realmVerifier;
    }

    /**
     * Sets the RealmVerifier used to verify realms against return_to URLs.
     */
    public void setRealmVerifier(RealmVerifier realmVerifier)
    {
        this._realmVerifier = realmVerifier;
    }

    /**
     * Checks the message against the OpenID 1.x / 2.0 auth request rules.
     *
     * @throws MessageException if any rule is violated.
     */
    public void validate() throws MessageException
    {
        super.validate();

        boolean compatibility = ! isVersion2();

        if ( compatibility && hasParameter("openid.identity") &&
                SELECT_ID.equals(getParameterValue("openid.identity")))
        {
            throw new MessageException(SELECT_ID + " not supported in OpenID1",
                    OpenIDException.AUTH_ERROR);
        }

        if ( hasParameter("openid.mode") &&
                ! MODE_SETUP.equals(getParameterValue("openid.mode")) &&
                ! MODE_IMMEDIATE.equals(getParameterValue("openid.mode")))
        {
            throw new MessageException(
                    "Invalid openid.mode value in auth request: "
                    + getParameterValue("openid.mode"),
                    OpenIDException.AUTH_ERROR);
        }

        // return_to must be a valid URL, if present
        try
        {
            if (getReturnTo() != null)
                new URL(getReturnTo());
        } catch (MalformedURLException e)
        {
            throw new MessageException(
                    "Error verifying return URL in auth request.",
                    OpenIDException.AUTH_ERROR, e);
        }

        if ( ! hasParameter("openid.return_to") )
        {
            if (compatibility)
            {
                throw new MessageException(
                        "openid.return_to is mandatory in OpenID1 auth requests",
                        OpenIDException.AUTH_ERROR);
            }

            else if ( ! hasParameter("openid.realm") )
            {
                throw new MessageException(
                        "openid.realm is mandatory if return_to is absent.",
                        OpenIDException.AUTH_REALM_ERROR);
            }
        }

        if ( compatibility && hasParameter("openid.realm") )
        {
            _log.warn("openid.realm should not be present in OpenID1 auth requests");
        }

        if ( !compatibility && hasParameter("openid.trust_root") )
        {
            _log.warn("openid.trust_root should not be present in OpenID2 auth requests.");
        }

        // figure out if 'claimed_id' and 'identity' are optional
        if ( ! hasParameter("openid.identity") )
        {
            // not optional in v1
            if (compatibility)
            {
                throw new MessageException(
                        "openid.identity is required in OpenID1 auth requests",
                        OpenIDException.AUTH_ERROR);
            }

            boolean hasAuthProvider = false;

            Iterator iter = getExtensions().iterator();
            while (iter.hasNext())
            {
                String typeUri = iter.next().toString();

                try
                {
                    MessageExtension extension = getExtension(typeUri);

                    if (extension.providesIdentifier())
                    {
                        hasAuthProvider = true;
                        break;
                    }
                }
                catch (MessageException ignore)
                {
                    // do nothing
                }
            }

            // no extension provides authentication services - invalid message
            if ( !hasAuthProvider )
            {
                throw new MessageException(
                        "no identifier specified in auth request",
                        OpenIDException.AUTH_ERROR);
            }

            // claimed_id must be present if and only if identity is present
            if ( hasParameter("openid.claimed_id") )
            {
                throw new MessageException(
                        "openid.claimed_id must be present if and only if " +
                        "openid.identity is present.",
                        OpenIDException.AUTH_ERROR);
            }
        }
        else if ( ! compatibility && ! hasParameter("openid.claimed_id") )
        {
            // Bug fix: error message previously misspelled the parameter
            // name as "openid.clamied_id".
            throw new MessageException(
                    "openid.claimed_id must be present in OpenID2 auth requests",
                    OpenIDException.AUTH_ERROR);
        }

        if (getRealm() != null)
        {
            int validation = _realmVerifier.validate(
                    getRealm(), getReturnTo(), compatibility);

            if ( RealmVerifier.OK != validation )
            {
                throw new MessageException("Realm verification failed (" +
                        validation + ") for: " + getRealm(),
                        OpenIDException.AUTH_REALM_ERROR);
            }
        }
    }
}
|
package org.openqa.selenium.example;
import com.gargoylesoftware.htmlunit.BrowserVersion;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.firefox.FirefoxDriver;
public class Example {
public static void main(String[] args) {
// Create a new instance of the html unit driver
// Notice that the remainder of the code relies on the interface,
// not the implementation.
// WHEN RUNNING FROM WINDOWS
//System.setProperty("webdriver.chrome.driver", "C:\\Users\\miburi\\IdeaProjects\\chromedriver.exe");
// WHEN RUNNING FROM MAC
System.setProperty("webdriver.chrome.driver", "/Users/devadmin/IdeaProjects/seleniumTest/chromedriver");
System.out.println("testing");
//testing only chromedriver
WebDriver driverC = new ChromeDriver();
//WebDriver driverF = new FirefoxDriver();
//WebDriver driver = new HtmlUnitDriver();
java.util.logging.Logger.getLogger("com.gargoylesoftware.htmlunit").setLevel(java.util.logging.Level.OFF);
java.util.logging.Logger.getLogger("org.apache.http").setLevel(java.util.logging.Level.OFF);
// And now use this to visit Google
driverC.get("http:
System.out.println("The title from Chrome is " + driverC.getTitle());
try {
// Find the text input element by its name
//the searchbox on zillow
WebElement searchbox = driverC.findElement(By.id("citystatezip"));
searchbox.sendKeys("Orlando, Florida");
WebElement clickBox = driverC.findElement(By.xpath("//button[@type='submit']"));
clickBox.click();
System.out.println("DEBUG 1");
long end = System.currentTimeMillis() + 5000;
WebElement listingsMenu;
while (System.currentTimeMillis() < end) {
// Browsers which render content (such as Firefox and IE) return "RenderedWebElements"
System.out.println("Keep refreshing");
listingsMenu = driverC.findElement(By.id("listings-menu-label"));
// If results have been returned, the results are displayed in a drop down.
if (listingsMenu.isDisplayed()) {
listingsMenu.click();
break;
}
}
System.out.println("DEBUG 2");
homeTypesReset(driverC);
Thread.sleep(5000);
}catch(InterruptedException e)
{
System.out.println("Exception" + e);
}
driverC.quit();
//driver.quit();
}
//Passes the webdriver and resets all the home types
public static void homeTypesReset(WebDriver driver){
String is_active="listing-type selected";
|
package org.owasp.esapi;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.List;
import org.owasp.esapi.errors.ValidationException;
public class ValidationErrorList {

	/**
	 * Map of unique context name to the ValidationException recorded for it.
	 */
	private Hashtable errorList = new Hashtable();

	/**
	 * Adds a new error to the list with a unique named context.
	 * No action is taken if either argument is null.
	 * An existing entry for the same context is overwritten.
	 *
	 * @param context unique named context for this error
	 * @param ve the exception describing the validation failure
	 */
	public void addError(String context, ValidationException ve) {
		if ((context != null) && (ve != null)) {
			errorList.put(context, ve);
		}
	}

	/**
	 * Returns the list of ValidationExceptions, or an empty list if no
	 * errors exist.
	 *
	 * @return List of ValidationException objects
	 */
	public List errors() {
		ArrayList validationExceptionList = new ArrayList(errorList.size());
		for (Enumeration e = errorList.elements() ; e.hasMoreElements() ;) {
			validationExceptionList.add((ValidationException)e.nextElement());
		}
		return validationExceptionList;
	}

	/**
	 * Retrieves ValidationException for given context if one exists.
	 *
	 * @param context unique name for each error
	 * @return ValidationException or null for given context
	 */
	public ValidationException getError(String context) {
		if (context == null) return null;
		Object returnValue = errorList.get(context);
		if (returnValue == null) return null;
		return (ValidationException)returnValue;
	}

	/**
	 * Returns true if no errors are present.
	 *
	 * @return true if this list contains no errors
	 */
	public boolean isEmpty() {
		return errorList.size() == 0;
	}

	/**
	 * Returns the number of errors present.
	 *
	 * @return the error count (fixed: javadoc previously said boolean)
	 */
	public int size() {
		return errorList.size();
	}
}
|
package org.pentaho.xul.swt.toolbar;
import org.pentaho.xul.XulItem;
import org.pentaho.xul.XulObject;
import org.pentaho.xul.toolbar.XulToolbar;
import org.pentaho.xul.toolbar.XulToolbox;
// Skeleton XulToolbox implementation: every method below is an
// auto-generated stub that has not been filled in yet.
public class Toolbox extends XulObject implements XulToolbox {
/**
 * Stub: toolbar lookup is not implemented; always returns null.
 */
public XulToolbar getToolbarById(String id) {
// TODO Auto-generated method stub
return null;
}
/**
 * Stub: id enumeration is not implemented; always returns null.
 */
public String[] getToolbarIds() {
// TODO Auto-generated method stub
return null;
}
/**
 * Stub: this object's id is not tracked; always returns null.
 */
public String getId() {
// TODO Auto-generated method stub
return null;
}
/**
 * Stub: the parent item is not tracked; always returns null.
 */
public XulItem getParent() {
// TODO Auto-generated method stub
return null;
}
/**
 * Stub: the given id is silently discarded.
 */
public void setId(String id) {
// TODO Auto-generated method stub
}
/**
 * Stub: the given parent is silently discarded.
 */
public void setParent(XulItem parent) {
// TODO Auto-generated method stub
}
}
|
package org.smof.collection;
import java.util.Map;
import org.bson.BsonDocument;
import org.bson.BsonObjectId;
import org.bson.types.ObjectId;
import org.smof.element.Element;
import org.smof.exception.SmofException;
import org.smof.field.PrimaryField;
import org.smof.field.SmofField;
import org.smof.parsers.SmofParser;
@SuppressWarnings("javadoc")
public class SmofUpdateQuery<T extends Element> {

	// Wraps any failure in the library's unchecked exception type.
	private static void handleError(Throwable cause) {
		throw new SmofException(cause);
	}

	private final BsonDocument update;
	private final SmofCollection<T> collection;
	private BsonDocument filter;
	private final SmofOpOptions options;
	private final SmofParser parser;
	private final Map<String, PrimaryField> fields;
	private final Class<T> type;

	SmofUpdateQuery(BsonDocument update, SmofCollection<T> collection, SmofOpOptions options, Map<String, PrimaryField> fields) {
		this.update = update;
		this.collection = collection;
		this.options = options;
		this.fields = fields;
		// Derived from the target collection.
		this.parser = collection.getParser();
		this.type = collection.getType();
		// Starts unfiltered; criteria are appended via fieldEq()/idEq().
		this.filter = new BsonDocument();
	}

	// Maps a field name to its PrimaryField, failing fast on unknown names.
	private SmofField validateFieldName(String fieldName) {
		final PrimaryField mapped = fields.get(fieldName);
		if(mapped == null) {
			handleError(new IllegalArgumentException(fieldName + " is not a valid field name for type " + type.getName()));
		}
		return mapped;
	}

	// Adds an equality criterion on the named field; returns this for chaining.
	public SmofUpdateQuery<T> fieldEq(String fieldName, Object value) {
		final SmofField target = validateFieldName(fieldName);
		filter.append(fieldName, parser.toBson(value, target));
		return this;
	}

	// Filters by document id and runs the update immediately.
	public void idEq(ObjectId id) {
		idEq(new BsonObjectId(id));
	}

	// Replaces any accumulated filter with an id match, then executes.
	public void idEq(BsonObjectId id) {
		filter = new BsonDocument(Element.ID, id);
		execute();
	}

	// Hands the filter and update off to the collection, then resets the parser.
	public void execute() {
		collection.execUpdate(filter, update, options);
		parser.reset();
	}
}
|
package org.usfirst.frc.team236.robot;
import org.usfirst.frc.team236.robot.commands.ShiftDown;
import org.usfirst.frc.team236.robot.commands.ShootCycle;
import org.usfirst.frc.team236.robot.commands.autonomous.BackwardRawtonomous;
import org.usfirst.frc.team236.robot.commands.autonomous.DoNothing;
import org.usfirst.frc.team236.robot.commands.autonomous.ForwardRawtonomous;
import org.usfirst.frc.team236.robot.commands.autonomous.LowBarRawtonomous;
import org.usfirst.frc.team236.robot.commands.profiled.CrossLowBar;
import org.usfirst.frc.team236.robot.commands.profiled.Reach;
import org.usfirst.frc.team236.robot.subsystems.Arm;
import org.usfirst.frc.team236.robot.subsystems.Drive;
import org.usfirst.frc.team236.robot.subsystems.Hanger;
import org.usfirst.frc.team236.robot.subsystems.Intake;
import org.usfirst.frc.team236.robot.subsystems.Shooter;
import com.kauailabs.navx.frc.AHRS;
import edu.wpi.first.wpilibj.CameraServer;
import edu.wpi.first.wpilibj.Compressor;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.IterativeRobot;
import edu.wpi.first.wpilibj.SPI;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.command.Scheduler;
import edu.wpi.first.wpilibj.livewindow.LiveWindow;
import edu.wpi.first.wpilibj.smartdashboard.SendableChooser;
import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;
import edu.wpi.first.wpilibj.vision.USBCamera;
import motionProfile.Profile;
import updater.Updater;
/**
 * Main FRC robot class (team 236): wires up subsystems, the autonomous
 * chooser, camera streaming and the compressor, and publishes telemetry
 * to the SmartDashboard during each mode.
 */
public class Robot extends IterativeRobot {
    // Subsystems — static so commands can reach them as Robot.<subsystem>
    public static Drive drive;
    public static Intake intake;
    public static Arm arm;
    public static Shooter shooter;
    // NOTE(review): hanger is never assigned in this class — any code reading
    // Robot.hanger would NPE. Confirm it is initialized elsewhere or unused.
    public static Hanger hanger;
    public static OI oi;
    Command autonomousCommand;
    // Raw type: SendableChooser was not generic in this WPILib version.
    SendableChooser chooser;
    CameraServer server;
    USBCamera camera;
    Compressor compressor;
    // navX IMU on the roboRIO MXP SPI port
    public static AHRS navx;
    // Profiles — precomputed motion profiles for autonomous routines
    public Profile crossLowBar;
    public Profile reach;
    public Profile toShoot;
    public Profile toLowGoal;
    // NOTE(review): bigCross is never initialized in this class — confirm whether still used.
    public Profile bigCross;
    public void robotInit() {
        // Create subsystems
        drive = new Drive();
        intake = new Intake();
        arm = new Arm();
        shooter = new Shooter();
        oi = new OI();
        // Create motion profiles from the constants in AutoMap
        crossLowBar = new Profile(AutoMap.crossLowBar);
        reach = new Profile(AutoMap.reach);
        toShoot = new Profile(AutoMap.toShoot);
        toLowGoal = new Profile(AutoMap.toLowGoal);
        // Pick an auto — options published to the dashboard for drive-team selection
        chooser = new SendableChooser();
        chooser.addDefault("Do Nothing", new DoNothing());
        chooser.addObject("Low Bar", new LowBarRawtonomous());
        chooser.addObject("Low Bar (profile)", new CrossLowBar(crossLowBar));
        chooser.addObject("Reach", new Reach(reach));
        chooser.addObject("Forward Rawto", new ForwardRawtonomous());
        chooser.addObject("Backward Rawto", new BackwardRawtonomous());
        SmartDashboard.putData("Auto mode", chooser);
        // Start camera stream
        camera = new USBCamera();
        server = CameraServer.getInstance();
        server.setQuality(40); // reduce stream quality to save bandwidth
        server.startAutomaticCapture(camera);
        // Start compressor
        compressor = new Compressor();
        compressor.start();
        navx = new AHRS(SPI.Port.kMXP);
    }
    public void disabledInit() {
        // Re-publish the chooser so the dashboard keeps showing it after a disable
        SmartDashboard.putData("Auto mode", chooser);
    }
    public void disabledPeriodic() {
        Scheduler.getInstance().run();
        // Re-zero the arm encoder whenever the arm rests on its bottom limit switch
        if (arm.getBottomLimit()) {
            arm.zeroEncoder();
        }
        SmartDashboard.putNumber("Left", drive.getLeftDistance());
        SmartDashboard.putNumber("Right", drive.getRightDistance());
    }
    public void autonomousInit() {
        drive.zeroEncoders();
        arm.setSetpointRelative(0); // hold arm at its current position
        autonomousCommand = ((Command) chooser.getSelected());
        if (autonomousCommand != null) {
            autonomousCommand.start();
        }
        Updater.getInstance().initControllers();
    }
    public void autonomousPeriodic() {
        Scheduler.getInstance().run();
        Updater.getInstance().updateAll(); // advance motion-profile controllers
        SmartDashboard.putNumber("Velocity", drive.leftSide.getSpeed());
        SmartDashboard.putNumber("Left", drive.getLeftDistance());
        SmartDashboard.putNumber("Right", drive.getRightDistance());
        SmartDashboard.putNumber("Arm angle", arm.getRawEncoder());
    }
    public void teleopInit() {
        // Stop whatever autonomous was doing before the drivers take over
        if (autonomousCommand != null) {
            autonomousCommand.cancel();
        }
        drive.zeroEncoders();
        arm.setSetpointRelative(0);
    }
    public void teleopPeriodic() {
        Scheduler.getInstance().run();
        SmartDashboard.putNumber("Match Time", DriverStation.getInstance().getMatchTime());
        SmartDashboard.putNumber("Arm angle", arm.getRawEncoder());
        SmartDashboard.putNumber("Left", drive.getLeftDistance());
        SmartDashboard.putNumber("Right", drive.getRightDistance());
        SmartDashboard.putBoolean("Bottom Limit", arm.getBottomLimit());
        SmartDashboard.putBoolean("Upper Limit", arm.getUpperLimit());
        SmartDashboard.putNumber("Velocity", drive.leftSide.getSpeed());
        SmartDashboard.putBoolean("Ball", intake.getLimit());
        // Hackish way to shoot with both triggers
        if (oi.leftStick.getRawButton(1) && oi.rightStick.getRawButton(1)) {
            new ShootCycle().start();
        }
        // Hackish way to ensure we end in low gear
        if (DriverStation.getInstance().getMatchTime() <= 1) {
            new ShiftDown().start();
        }
    }
    public void testPeriodic() {
        // intentionally empty — test mode does nothing periodically
    }
    public void testInit() {
        arm.setSetpointRelative(0);
        // Expose the arm PID controller for tuning in LiveWindow
        LiveWindow.addActuator("Arm", "Arm", arm.getPIDController());
    }
}
|
package org.ktunaxa.referral.client.gui;
import com.smartgwt.client.types.Overflow;
import com.smartgwt.client.types.VisibilityMode;
import com.smartgwt.client.widgets.layout.SectionStack;
import com.smartgwt.client.widgets.layout.SectionStackSection;
import com.smartgwt.client.widgets.layout.VLayout;
import org.geomajas.gwt.client.command.AbstractCommandCallback;
import org.geomajas.gwt.client.command.GwtCommand;
import org.geomajas.gwt.client.command.GwtCommandDispatcher;
import org.geomajas.gwt.client.map.feature.Feature;
import org.geomajas.gwt.client.map.layer.VectorLayer;
import org.ktunaxa.referral.client.widget.AbstractCollapsibleListBlock;
import org.ktunaxa.referral.server.command.dto.GetTasksRequest;
import org.ktunaxa.referral.server.command.dto.GetTasksResponse;
import org.ktunaxa.referral.server.dto.TaskDto;
import java.util.ArrayList;
import java.util.List;
/**
 * Panel that displays unassigned tasks, grouped into one collapsible section
 * per candidate role. Sections are (re)populated asynchronously on each
 * {@link #show()} via a GetTasks command.
 *
 * @author Joachim Van der Auwera
 */
public class UnassignedTasksPanel extends VLayout {
    // Candidate group titles, positions need to match {@link #CANDIDATE_CHECKS}
    private static final String[] CANDIDATE_TITLES = {
        "Aquatic evaluator",
        "Archaeology evaluator",
        "Community manager",
        "Cultural evaluator",
        "Ecology evaluator",
        "Evaluate evaluator",
        "Referral manager",
        "Treaty evaluator"
    };
    // Candidate group string to test, positions need to match {@link #CANDIDATE_TITLES}
    private static final String[] CANDIDATE_CHECKS = {
        "aquatic",
        "archaeology",
        "community",
        "cultural",
        "ecology",
        "evaluate",
        "referral",
        "treaty"
    };
    // index of the referralManager role in the candidate lists — tasks that match
    // no other group fall through to this section
    private static final int MANAGER = 6;
    private SectionStackSection[] sections = new SectionStackSection[CANDIDATE_CHECKS.length];
    private TaskListView[] views = new TaskListView[CANDIDATE_CHECKS.length];
    // Unchecked raw-array creation: Java cannot allocate generic arrays directly.
    private List<AbstractCollapsibleListBlock<TaskDto>>[] lists = new List[CANDIDATE_CHECKS.length];
    public UnassignedTasksPanel() {
        super();
        setWidth100();
        // One collapsible section per candidate role, stacked vertically
        SectionStack groups = new SectionStack();
        groups.setSize("100%", "100%");
        groups.setOverflow(Overflow.AUTO);
        groups.setVisibilityMode(VisibilityMode.MULTIPLE);
        groups.setPadding(5);
        addMember(groups);
        for (int i = 0 ; i < CANDIDATE_CHECKS.length ; i++) {
            sections[i] = new SectionStackSection(CANDIDATE_TITLES[i]);
            views[i] = new TaskListView();
            lists[i] = new ArrayList<AbstractCollapsibleListBlock<TaskDto>>();
            sections[i].addItem(views[i]);
            groups.addSection(sections[i]); // @todo @sec only add when the role is assigned to the user
        }
    }
    public void init(VectorLayer referralLayer, Feature referral) {
        // nothing to do for now
    }
    @Override
    public void show() {
        super.show();
        // Reset all sections to their empty, collapsed state before fetching
        for (int i = 0 ; i < CANDIDATE_CHECKS.length ; i++) {
            lists[i].clear();
            views[i].populate(lists[i]);
            sections[i].setTitle(CANDIDATE_TITLES[i]);
            sections[i].setExpanded(false);
        }
        GetTasksRequest request = new GetTasksRequest();
        request.setIncludeUnassignedTasks(true);
        GwtCommand command = new GwtCommand(GetTasksRequest.COMMAND);
        command.setCommandRequest(request);
        GwtCommandDispatcher.getInstance().execute(command, new AbstractCommandCallback<GetTasksResponse>() {
            public void execute(GetTasksResponse response) {
                for (int i = 0 ; i < CANDIDATE_CHECKS.length ; i++) {
                    lists[i].clear(); // clear again to avoid double AJAX calls causing duplicates
                }
                for (TaskDto task : response.getTasks()) {
                    TaskBlock block = new TaskBlock(task);
                    // Case-sensitive substring match against the candidate-group string.
                    // NOTE(review): a task can match several groups (and "evaluate" may match
                    // other "...evaluator" group ids, depending on their casing) — confirm the
                    // actual candidate-group naming against the server side.
                    String candidates = task.getCandidates().toString();
                    boolean added = false;
                    for (int i = 0 ; i < CANDIDATE_CHECKS.length ; i++) {
                        if (candidates.contains(CANDIDATE_CHECKS[i])) {
                            lists[i].add(block);
                            added = true;
                        }
                    }
                    // Unmatched tasks default to the referral-manager section
                    if (!added) {
                        lists[MANAGER].add(block);
                    }
                }
                // Annotate each non-empty section title with its task count; if exactly
                // one section has tasks, expand it automatically.
                int sectionToExpand = 0;
                int sectionsToExpandCount = 0;
                for (int i = 0 ; i < CANDIDATE_CHECKS.length ; i++) {
                    int count = lists[i].size();
                    if (count > 0) {
                        sections[i].setTitle(CANDIDATE_TITLES[i] +
                                " (<span style=\"font-weight:bold;\">" + count + "</span>)");
                        sectionToExpand = i;
                        sectionsToExpandCount++;
                    }
                    views[i].populate(lists[i]); // @todo @sec only add when the role is assigned to the user
                }
                if (1 == sectionsToExpandCount) {
                    sections[sectionToExpand].setExpanded(true);
                }
            }
        });
    }
}
|
package org.jboss.as.messaging.jms.bridge;
import static org.jboss.as.controller.descriptions.ModelDescriptionConstants.OP_ADDR;
import static org.jboss.as.server.Services.addServerExecutorDependency;
import java.util.List;
import java.util.Properties;
import org.hornetq.jms.bridge.ConnectionFactoryFactory;
import org.hornetq.jms.bridge.DestinationFactory;
import org.hornetq.jms.bridge.JMSBridge;
import org.hornetq.jms.bridge.QualityOfServiceMode;
import org.hornetq.jms.bridge.impl.JMSBridgeImpl;
import org.hornetq.jms.bridge.impl.JNDIConnectionFactoryFactory;
import org.hornetq.jms.bridge.impl.JNDIDestinationFactory;
import org.jboss.as.controller.AbstractAddStepHandler;
import org.jboss.as.controller.AttributeDefinition;
import org.jboss.as.controller.OperationContext;
import org.jboss.as.controller.OperationFailedException;
import org.jboss.as.controller.OperationStepHandler;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.ServiceVerificationHandler;
import org.jboss.as.controller.SimpleAttributeDefinition;
import org.jboss.as.messaging.MessagingServices;
import org.jboss.as.messaging.jms.SelectorAttribute;
import org.jboss.as.naming.deployment.ContextNames;
import org.jboss.as.txn.service.TxnServices;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.Property;
import org.jboss.msc.service.ServiceBuilder;
import org.jboss.msc.service.ServiceController;
import org.jboss.msc.service.ServiceController.Mode;
import org.jboss.msc.service.ServiceName;
/**
 * Add-operation handler for a JMS bridge resource: validates the operation's
 * attributes into the model and, at runtime, builds a HornetQ {@link JMSBridge}
 * plus the MSC service that hosts it.
 */
public class JMSBridgeAdd extends AbstractAddStepHandler {
    public static final JMSBridgeAdd INSTANCE = new JMSBridgeAdd();
    // Singleton — use INSTANCE.
    private JMSBridgeAdd() {
    }
    @Override
    protected void populateModel(final ModelNode operation, final ModelNode model) throws OperationFailedException {
        // Validate and copy every bridge/source/target attribute from the operation into the model.
        for (final AttributeDefinition attributeDefinition : JMSBridgeDefinition.JMS_BRIDGE_ATTRIBUTES) {
            attributeDefinition.validateAndSet(operation, model);
        }
        for (final AttributeDefinition attributeDefinition : JMSBridgeDefinition.JMS_TARGET_ATTRIBUTES) {
            attributeDefinition.validateAndSet(operation, model);
        }
        for (final AttributeDefinition attributeDefinition : JMSBridgeDefinition.JMS_SOURCE_ATTRIBUTES) {
            attributeDefinition.validateAndSet(operation, model);
        }
    }
    @Override
    protected void performRuntime(final OperationContext context, final ModelNode operation, final ModelNode model,
                                  final ServiceVerificationHandler verificationHandler, final List<ServiceController<?>> newControllers)
            throws OperationFailedException {
        // Defer the actual service installation to the RUNTIME stage.
        context.addStep(new OperationStepHandler() {
            @Override
            public void execute(OperationContext context, ModelNode operation) throws OperationFailedException {
                final PathAddress address = PathAddress.pathAddress(operation.get(OP_ADDR));
                String moduleName = resolveAttribute(JMSBridgeDefinition.MODULE, context, model);
                final JMSBridge bridge = createJMSBridge(context, model);
                // The bridge name is the last element of the resource address.
                final String bridgeName = address.getLastElement().getValue();
                final JMSBridgeService bridgeService = new JMSBridgeService(moduleName, bridgeName, bridge);
                final ServiceName bridgeServiceName = MessagingServices.getJMSBridgeServiceName(bridgeName);
                // The bridge service needs the transaction manager and the server executor.
                final ServiceBuilder<JMSBridge> jmsBridgeServiceBuilder = context.getServiceTarget().addService(bridgeServiceName, bridgeService)
                        .addListener(verificationHandler)
                        .addDependency(TxnServices.JBOSS_TXN_TRANSACTION_MANAGER)
                        .setInitialMode(Mode.ACTIVE);
                addServerExecutorDependency(jmsBridgeServiceBuilder, bridgeService.getExecutorInjector(), false);
                // Only depend on local JNDI bindings when the side has no external context configured.
                if (dependsOnLocalResources(model, JMSBridgeDefinition.SOURCE_CONTEXT)) {
                    addDependencyForJNDIResource(jmsBridgeServiceBuilder, model, context, JMSBridgeDefinition.SOURCE_CONNECTION_FACTORY);
                    addDependencyForJNDIResource(jmsBridgeServiceBuilder, model, context, JMSBridgeDefinition.SOURCE_DESTINATION);
                }
                if (dependsOnLocalResources(model, JMSBridgeDefinition.TARGET_CONTEXT)) {
                    addDependencyForJNDIResource(jmsBridgeServiceBuilder, model, context, JMSBridgeDefinition.TARGET_CONNECTION_FACTORY);
                    addDependencyForJNDIResource(jmsBridgeServiceBuilder, model, context, JMSBridgeDefinition.TARGET_DESTINATION);
                }
                newControllers.add(jmsBridgeServiceBuilder.install());
                context.completeStep();
            }
        }, OperationContext.Stage.RUNTIME);
    }
    private boolean dependsOnLocalResources(ModelNode model, AttributeDefinition attr) throws OperationFailedException {
        // if either the source or target context attribute is not defined, this means that the JMS resources will be looked up
        // from the local HornetQ server.
        return !(model.hasDefined(attr.getName()));
    }
    // Adds a dependency on the binder service of a locally-bound JNDI resource.
    private void addDependencyForJNDIResource(final ServiceBuilder<JMSBridge> builder, final ModelNode model, final OperationContext context,
                                              final AttributeDefinition attribute) throws OperationFailedException {
        String jndiName = attribute.resolveModelAttribute(context, model).asString();
        builder.addDependency(ContextNames.bindInfoFor(jndiName).getBinderServiceName());
    }
    // Resolves all configured attributes and constructs the HornetQ bridge instance.
    private JMSBridge createJMSBridge(OperationContext context, ModelNode model) throws OperationFailedException {
        // Source side: connection factory and destination, looked up through JNDI
        // (with optional external context properties).
        final Properties sourceContextProperties = resolveContextProperties(JMSBridgeDefinition.SOURCE_CONTEXT, context, model);
        final String sourceConnectionFactoryName = JMSBridgeDefinition.SOURCE_CONNECTION_FACTORY.resolveModelAttribute(context, model).asString();
        final ConnectionFactoryFactory sourceCff = new JNDIConnectionFactoryFactory(sourceContextProperties , sourceConnectionFactoryName);
        final String sourceDestinationName = JMSBridgeDefinition.SOURCE_DESTINATION.resolveModelAttribute(context, model).asString();
        final DestinationFactory sourceDestinationFactory = new JNDIDestinationFactory(sourceContextProperties, sourceDestinationName);
        // Target side, same shape as the source side.
        final Properties targetContextProperties = resolveContextProperties(JMSBridgeDefinition.TARGET_CONTEXT, context, model);
        final String targetConnectionFactoryName = JMSBridgeDefinition.TARGET_CONNECTION_FACTORY.resolveModelAttribute(context, model).asString();
        final ConnectionFactoryFactory targetCff = new JNDIConnectionFactoryFactory(targetContextProperties, targetConnectionFactoryName);
        final String targetDestinationName = JMSBridgeDefinition.TARGET_DESTINATION.resolveModelAttribute(context, model).asString();
        final DestinationFactory targetDestinationFactory = new JNDIDestinationFactory(targetContextProperties, targetDestinationName);
        // Optional attributes resolve to null when undefined (see resolveAttribute).
        final String sourceUsername = resolveAttribute(JMSBridgeDefinition.SOURCE_USER, context, model);
        final String sourcePassword = resolveAttribute(JMSBridgeDefinition.SOURCE_PASSWORD, context, model);
        final String targetUsername = resolveAttribute(JMSBridgeDefinition.TARGET_USER, context, model);
        final String targetPassword = resolveAttribute(JMSBridgeDefinition.TARGET_PASSWORD, context, model);
        final String selector = resolveAttribute(SelectorAttribute.SELECTOR, context, model);
        final long failureRetryInterval = JMSBridgeDefinition.FAILURE_RETRY_INTERVAL.resolveModelAttribute(context, model).asLong();
        final int maxRetries = JMSBridgeDefinition.MAX_RETRIES.resolveModelAttribute(context, model).asInt();
        final QualityOfServiceMode qosMode = QualityOfServiceMode.valueOf( JMSBridgeDefinition.QUALITY_OF_SERVICE.resolveModelAttribute(context, model).asString());
        final int maxBatchSize = JMSBridgeDefinition.MAX_BATCH_SIZE.resolveModelAttribute(context, model).asInt();
        final long maxBatchTime = JMSBridgeDefinition.MAX_BATCH_TIME.resolveModelAttribute(context, model).asLong();
        final String subName = resolveAttribute(JMSBridgeDefinition.SUBSCRIPTION_NAME, context, model);
        final String clientID = resolveAttribute(JMSBridgeDefinition.CLIENT_ID, context, model);
        final boolean addMessageIDInHeader = JMSBridgeDefinition.ADD_MESSAGE_ID_IN_HEADER.resolveModelAttribute(context, model).asBoolean();
        return new JMSBridgeImpl(sourceCff,
                targetCff,
                sourceDestinationFactory,
                targetDestinationFactory,
                sourceUsername,
                sourcePassword,
                targetUsername,
                targetPassword,
                selector,
                failureRetryInterval,
                maxRetries,
                qosMode,
                maxBatchSize,
                maxBatchTime,
                subName,
                clientID,
                addMessageIDInHeader);
    }
    // Converts the resolved context attribute into JNDI environment Properties;
    // returns null when the attribute is undefined (local lookup).
    private Properties resolveContextProperties(AttributeDefinition attribute, OperationContext context, ModelNode model) throws OperationFailedException {
        final ModelNode contextModel = attribute.resolveModelAttribute(context, model);
        if (!contextModel.isDefined()) {
            return null;
        }
        final Properties contextProperties = new Properties();
        for (Property property : contextModel.asPropertyList()) {
            contextProperties.put(property.getName(), property.getValue().asString());
        }
        return contextProperties;
    }
    /**
     * Return null if the resolved attribute is not defined
     */
    private String resolveAttribute(SimpleAttributeDefinition attr, OperationContext context, ModelNode model) throws OperationFailedException {
        final ModelNode node = attr.resolveModelAttribute(context, model);
        return node.isDefined() ? node.asString() : null;
    }
}
|
package org.openrdf.server.metadata;
import info.aduna.iteration.Iterations;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Request;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.ext.Providers;
import org.openrdf.model.Literal;
import org.openrdf.model.Resource;
import org.openrdf.model.Statement;
import org.openrdf.model.URI;
import org.openrdf.model.impl.URIImpl;
import org.openrdf.repository.RepositoryException;
import org.openrdf.repository.object.ObjectConnection;
import com.sun.jersey.api.NotFoundException;
import com.sun.jersey.api.core.ResourceContext;
import eu.medsea.util.MimeUtil;
public class DataResource extends SubResource {
private static final String NAMESPACE = "http://www.openrdf.org/rdf/2009/meta
private static URI MEDIA_TYPE = new URIImpl(NAMESPACE + "mediaType");
private static URI REDIRECT = new URIImpl(NAMESPACE + "redirect");
public DataResource(Request request, ResourceContext ctx,
Providers providers, File file, ObjectConnection con, URI uri,
MultivaluedMap<String, String> params) {
super(request, ctx, providers, file, con, uri, params);
}
public ResponseBuilder get() throws RepositoryException {
ResponseBuilder rb;
List<Statement> redirect;
if (file.canRead()) {
Date last = new Date(file.lastModified());
rb = request.evaluatePreconditions(last);
if (rb == null) {
rb = Response.ok();
rb.lastModified(last);
rb.type(getContentType());
rb.entity(file);
}
} else if (file.exists()) {
return methodNotAllowed();
} else if (!(redirect = con.getStatements(uri, REDIRECT, null).asList())
.isEmpty()) {
String obj = redirect.get(0).getObject().stringValue();
rb = Response.status(307).location(java.net.URI.create(obj));
} else if (con.hasStatement((Resource) null, null, null, uri)) {
java.net.URI loc = java.net.URI.create(uri.stringValue()
+ "?named-graph");
rb = Response.status(302).location(loc);
} else if (con.hasStatement(uri, null, null)) {
java.net.URI loc = java.net.URI.create(uri.stringValue()
+ "?describe");
rb = Response.status(303).location(loc);
} else {
throw new NotFoundException("Not Found <" + uri.stringValue() + ">");
}
return rb;
}
public ResponseBuilder put(HttpHeaders headers, InputStream in)
throws IOException, RepositoryException {
ResponseBuilder rb;
long lastModified = file.lastModified();
rb = request.evaluatePreconditions(new Date(lastModified));
if (rb != null)
return rb;
List<String> loc = headers.getRequestHeader("Content-Location");
if (headers.getMediaType() == null && loc != null) {
con.remove(uri, REDIRECT, null);
con.add(uri, REDIRECT, vf.createURI(loc.get(0)));
con.setAutoCommit(true);
return Response.noContent();
}
try {
// TODO use file locks to prevent conflicts
File dir = file.getParentFile();
dir.mkdirs();
File tmp = new File(dir, file.getName() + ".part");
OutputStream out = new FileOutputStream(tmp);
try {
byte[] buf = new byte[512];
int read;
while ((read = in.read(buf)) >= 0) {
out.write(buf, 0, read);
}
if (!tmp.renameTo(file)) {
tmp.delete();
return methodNotAllowed();
}
MultivaluedMap<String, String> map = headers.getRequestHeaders();
String contentType = map.getFirst("Content-Type");
if (contentType != null) {
con.remove(uri, MEDIA_TYPE, null);
con.add(uri, MEDIA_TYPE, vf.createLiteral(contentType));
con.setAutoCommit(true);
}
return Response.noContent();
} finally {
out.close();
}
} catch (FileNotFoundException e) {
return methodNotAllowed();
}
}
public ResponseBuilder delete() throws RepositoryException {
if (!file.exists())
throw new NotFoundException("Not Found");
Date last = new Date(file.lastModified());
ResponseBuilder rb = request.evaluatePreconditions(last);
if (rb != null)
return rb;
if (!file.delete())
return methodNotAllowed();
con.remove(uri, MEDIA_TYPE, null);
con.remove(uri, REDIRECT, null);
con.setAutoCommit(true);
return Response.noContent();
}
public Set<String> getAllowedMethods() throws RepositoryException {
Set<String> set = new LinkedHashSet<String>();
if (file.canRead()) {
set.add("GET");
set.add("HEAD");
}
File parent = file.getParentFile();
if (file.canWrite() || !file.exists()
&& (!parent.exists() || parent.canWrite())) {
set.add("PUT");
}
if (file.exists() && parent.canWrite()) {
set.add("DELETE");
}
MetaResource meta = new MetaResource(request, ctx, providers, file,
con, uri, params);
Set<String> allowed = meta.getAllowedMethods();
if (allowed.contains("POST")) {
set.add("POST");
}
return set;
}
private String getContentType() throws RepositoryException {
List<Statement> types = Iterations.asList(con.getStatements(uri,
MEDIA_TYPE, null, true));
for (Statement st : types) {
return st.getObject().stringValue();
}
String mimeType = MimeUtil.getMagicMimeType(file);
if (mimeType == null)
return MediaType.APPLICATION_OCTET_STREAM;
con.setAutoCommit(false);
try {
types = Iterations.asList(con.getStatements(uri, MEDIA_TYPE, null,
true));
for (Statement st : types) {
return st.getObject().stringValue();
}
Literal lit = con.getValueFactory().createLiteral(mimeType);
con.add(uri, MEDIA_TYPE, lit);
con.setAutoCommit(true);
} finally {
if (!con.isAutoCommit()) {
con.rollback();
con.setAutoCommit(true);
}
}
return mimeType;
}
}
|
package org.openhab.habdroid.ui;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.support.v4.app.ListFragment;
import android.text.TextUtils;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ListView;
import org.json.JSONException;
import org.json.JSONObject;
import org.openhab.habdroid.R;
import org.openhab.habdroid.model.OpenHABItem;
import org.openhab.habdroid.model.OpenHABNFCActionList;
import org.openhab.habdroid.model.OpenHABWidget;
import org.openhab.habdroid.model.OpenHABWidgetDataSource;
import org.openhab.habdroid.util.Constants;
import org.openhab.habdroid.util.MyAsyncHttpClient;
import org.openhab.habdroid.util.MyHttpClient;
import org.openhab.habdroid.util.Util;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.io.StringReader;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import okhttp3.Call;
import okhttp3.Headers;
/**
 * The app's main fragment: displays the list of openHAB widgets for one
 * sitemap page and supports navigation through the sitemap via linked pages.
 */
public class OpenHABWidgetListFragment extends ListFragment {
    private static final String TAG = OpenHABWidgetListFragment.class.getSimpleName();
    private OnWidgetSelectedListener widgetSelectedListener;
    // Datasource, providing list of openHAB widgets
    private OpenHABWidgetDataSource openHABWidgetDataSource;
    // List adapter for list view of openHAB widgets
    private OpenHABWidgetAdapter openHABWidgetAdapter;
    // Url of current sitemap page displayed
    private String displayPageUrl;
    // sitemap root url
    private String sitemapRootUrl = "";
    // openHAB base url (default points at the public demo server)
    private String openHABBaseUrl = "http://demo.openhab.org:8080/";
    // List of widgets to display
    private ArrayList<OpenHABWidget> widgetList = new ArrayList<OpenHABWidget>();
    // Username/password for authentication
    private String openHABUsername = "";
    private String openHABPassword = "";
    // selected openhab widget (kept while the NFC write dialog is open)
    private OpenHABWidget selectedOpenHABWidget;
    // widget Id which we got from nfc tag
    private String nfcWidgetId;
    // widget command which we got from nfc tag
    private String nfcCommand;
    // auto close app after nfc action is complete
    private boolean nfcAutoClose = false;
    // parent activity
    private OpenHABMainActivity mActivity;
    // loopj
    private MyAsyncHttpClient mAsyncHttpClient;
    // Am I visible?
    private boolean mIsVisible = false;
    // self-reference captured in onCreate — presumably for use in anonymous callbacks
    private OpenHABWidgetListFragment mTag;
    private int mCurrentSelectedItem = -1;
    // position of this page in the sitemap pager
    private int mPosition;
    private int mOldSelectedItem = -1;
    private String mAtmosphereTrackingId;
    //handlers will reconnect the network during outages
    private Handler networkHandler = new Handler();
    private Runnable networkRunnable;
    // keeps track of current request to cancel it in onPause
    private Call mRequestHandle;
@Override
public void onCreate(Bundle savedInstanceState) {
    Log.d(TAG, "onCreate()");
    Log.d(TAG, "isAdded = " + isAdded());
    mTag = this;
    super.onCreate(savedInstanceState);
    // Page URL and connection settings are passed in by the hosting activity
    // through the fragment arguments (see withPage()).
    if (getArguments() != null) {
        displayPageUrl = getArguments().getString("displayPageUrl");
        openHABBaseUrl = getArguments().getString("openHABBaseUrl");
        sitemapRootUrl = getArguments().getString("sitemapRootUrl");
        openHABUsername = getArguments().getString("openHABUsername");
        openHABPassword = getArguments().getString("openHABPassword");
        mPosition = getArguments().getInt("position");
    }
}
@Override
public void onActivityCreated(Bundle savedInstanceState) {
    super.onActivityCreated(savedInstanceState);
    Log.d(TAG, "onActivityCreated()");
    Log.d(TAG, "isAdded = " + isAdded());
    mActivity = (OpenHABMainActivity)getActivity();
    final String iconFormat = PreferenceManager.getDefaultSharedPreferences(mActivity).getString("iconFormatType","PNG");
    openHABWidgetDataSource = new OpenHABWidgetDataSource(iconFormat);
    openHABWidgetAdapter = new OpenHABWidgetAdapter(getActivity(),
            R.layout.openhabwidgetlist_genericitem, widgetList);
    getListView().setAdapter(openHABWidgetAdapter);
    // Connection settings from the activity override whatever the arguments carried
    openHABBaseUrl = mActivity.getOpenHABBaseUrl();
    openHABUsername = mActivity.getOpenHABUsername();
    openHABPassword = mActivity.getOpenHABPassword();
    // We're using atmosphere so create an own client to not block the others
    SharedPreferences prefs = PreferenceManager
            .getDefaultSharedPreferences(mActivity);
    mAsyncHttpClient = new MyAsyncHttpClient(mActivity, prefs.getBoolean(Constants.PREFERENCE_SSLHOST,
            false), prefs.getBoolean(Constants.PREFERENCE_SSLCERT, false));
    mAsyncHttpClient.setBasicAuth(openHABUsername, openHABPassword);
    openHABWidgetAdapter.setOpenHABUsername(openHABUsername);
    openHABWidgetAdapter.setOpenHABPassword(openHABPassword);
    openHABWidgetAdapter.setOpenHABBaseUrl(openHABBaseUrl);
    openHABWidgetAdapter.setAsyncHttpClient(mAsyncHttpClient);
    // Short click: navigate into the widget's linked page, if it has one
    getListView().setOnItemClickListener(new AdapterView.OnItemClickListener() {
        public void onItemClick(AdapterView<?> parent, View view, int position,
                long id) {
            Log.d(TAG, "Widget clicked " + String.valueOf(position));
            OpenHABWidget openHABWidget = openHABWidgetAdapter.getItem(position);
            if (openHABWidget.hasLinkedPage()) {
                // Widget have a page linked to it
                String[] splitString;
                splitString = openHABWidget.getLinkedPage().getTitle().split("\\[|\\]");
                if (OpenHABWidgetListFragment.this.widgetSelectedListener != null) {
                    widgetSelectedListener.onWidgetSelectedListener(openHABWidget.getLinkedPage(),
                            OpenHABWidgetListFragment.this);
                }
//                    navigateToPage(openHABWidget.getLinkedPage().getLink(), splitString[0]);
                mOldSelectedItem = position;
            } else {
                Log.d(TAG, String.format("Click on item with no linked page, reverting selection to item %d", mOldSelectedItem));
                // If an item without a linked page is clicked this will clear the selection
                // and revert it to previously selected item (if any) when CHOICE_MODE_SINGLE
                // is switched on for widget listview in multi-column mode on tablets
                getListView().clearChoices();
                getListView().requestLayout();
                getListView().setItemChecked(mOldSelectedItem, true);
            }
        }
    });
    // Long click: offer writing an NFC tag for switch/selection/colorpicker widgets
    getListView().setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
        public boolean onItemLongClick(AdapterView<?> parent, View view,
                int position, long id) {
            Log.d(TAG, "Widget long-clicked " + String.valueOf(position));
            OpenHABWidget openHABWidget = openHABWidgetAdapter.getItem(position);
            Log.d(TAG, "Widget type = " + openHABWidget.getType());
            if (openHABWidget.getType().equals("Switch") || openHABWidget.getType().equals("Selection") ||
                    openHABWidget.getType().equals("Colorpicker")) {
                selectedOpenHABWidget = openHABWidget;
                AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
                builder.setTitle(R.string.nfc_dialog_title);
                OpenHABNFCActionList nfcActionList = new OpenHABNFCActionList(selectedOpenHABWidget);
                builder.setItems(nfcActionList.getNames(), new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int which) {
                        // Launch the NFC write activity with the selected item/command
                        Intent writeTagIntent = new Intent(getActivity().getApplicationContext(),
                                OpenHABWriteTagActivity.class);
                        writeTagIntent.putExtra("sitemapPage", displayPageUrl);
                        writeTagIntent.putExtra("item", selectedOpenHABWidget.getItem().getName());
                        writeTagIntent.putExtra("itemType", selectedOpenHABWidget.getItem().getType());
                        OpenHABNFCActionList nfcActionList =
                                new OpenHABNFCActionList(selectedOpenHABWidget);
                        writeTagIntent.putExtra("command", nfcActionList.getCommands()[which]);
                        startActivityForResult(writeTagIntent, 0);
                        Util.overridePendingTransition(getActivity(), false);
                        selectedOpenHABWidget = null;
                    }
                });
                builder.show();
                return true;
            }
            // Long clicks are consumed even for unsupported widget types
            return true;
        }
    });
    if (getResources().getInteger(R.integer.pager_columns) > 1) {
        Log.d(TAG, "More then 1 column, setting selector on");
        getListView().setChoiceMode(ListView.CHOICE_MODE_SINGLE);
    }
}
@Override
public void onAttach(Activity activity) {
    super.onAttach(activity);
    Log.d(TAG, "onAttach()");
    Log.d(TAG, "isAdded = " + isAdded());
    // The hosting activity must implement OnWidgetSelectedListener so this
    // fragment can report navigation into linked sitemap pages.
    if (activity instanceof OnWidgetSelectedListener) {
        widgetSelectedListener = (OnWidgetSelectedListener) activity;
        mActivity = (OpenHABMainActivity) activity;
    } else {
        // FIX: was Log.e("TAG", ...) — the error was logged under the literal
        // string "TAG" instead of this class's log tag constant.
        Log.e(TAG, "Attached to incompatible activity");
    }
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
Log.i(TAG, "onCreateView");
Log.d(TAG, "isAdded = " + isAdded());
return inflater.inflate(R.layout.openhabwidgetlist_fragment, container, false);
}
    /**
     * Lifecycle logging hook only; no view wiring is performed here.
     */
    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        Log.d(TAG, "onViewCreated");
        Log.d(TAG, "isAdded = " + isAdded());
        super.onViewCreated(view, savedInstanceState);
    }
    /**
     * Cancels the in-flight page request on a background thread, stops the
     * adapter's image/video refresh tasks, and remembers the checked list item
     * so it can be restored on the next page load.
     */
    @Override
    public void onPause () {
        super.onPause();
        Log.d(TAG, "onPause() " + displayPageUrl);
        Log.d(TAG, "isAdded = " + isAdded());
        // We only have 1 request running per fragment so
        // cancel it if we have it
        Thread thread = new Thread(new Runnable(){
            @Override
            public void run(){
                // NOTE(review): mRequestHandle can be reassigned by showPage() on
                // another thread between this null check and cancel() — confirm
                // whether that race matters in practice.
                if (mRequestHandle != null) {
                    mRequestHandle.cancel();
                    mRequestHandle = null;
                }
            }
        });
        thread.start();
        if (openHABWidgetAdapter != null) {
            openHABWidgetAdapter.stopImageRefresh();
            openHABWidgetAdapter.stopVideoWidgets();
        }
        // Only touch the view hierarchy while still attached to an activity.
        if (isAdded())
            mCurrentSelectedItem = getListView().getCheckedItemPosition();
    }
@Override
public void onResume () {
super.onResume();
Log.d(TAG, "onResume() " + displayPageUrl);
Log.d(TAG, "isAdded = " + isAdded());
if (displayPageUrl != null)
showPage(displayPageUrl, false);
}
@Override
public void setUserVisibleHint (boolean isVisibleToUser) {
super.setUserVisibleHint(isVisibleToUser);
mIsVisible = isVisibleToUser;
Log.d(TAG, String.format("isVisibleToUser(%B)", isVisibleToUser));
}
public static OpenHABWidgetListFragment withPage(String pageUrl, String baseUrl, String rootUrl,
String username, String password, int position) {
Log.d(TAG, "withPage(" + pageUrl + ")");
OpenHABWidgetListFragment fragment = new OpenHABWidgetListFragment();
Bundle args = new Bundle();
args.putString("displayPageUrl", pageUrl);
args.putString("openHABBaseUrl", baseUrl);
args.putString("sitemapRootUrl", rootUrl);
args.putString("openHABUsername", username);
args.putString("openHABPassword", password);
args.putInt("position", position);
fragment.setArguments(args);
return fragment;
}
    /**
     * Loads data from a sitemap page URL and passes it to processContent.
     * Sets up Atmosphere headers so the server can either answer immediately
     * (initial load) or hold the connection open (long polling).
     *
     * @param pageUrl     an absolute URL of the openHAB sitemap page
     * @param longPolling enable long polling when loading the page
     */
    public void showPage(String pageUrl, final boolean longPolling) {
        Log.i(TAG, " showPage for " + pageUrl + " longPolling = " + longPolling);
        Log.d(TAG, "isAdded = " + isAdded());
        // Cancel any existing http request to openHAB (typically ongoing long poll)
        if (mRequestHandle != null) {
            mRequestHandle.cancel();
            mRequestHandle = null;
        }
        if (!longPolling) {
            startProgressIndicator();
            // A fresh (non-long-poll) load starts a new Atmosphere conversation.
            this.mAtmosphereTrackingId = null;
        }
        Map<String, String> headers = new HashMap<String, String>();
        // openHAB 1 talks XML; later versions are handled as JSON elsewhere.
        if (mActivity.getOpenHABVersion() == 1) {
            headers.put("Accept", "application/xml");
        }
        headers.put("X-Atmosphere-Framework", "1.0");
        if (longPolling) {
            // Long poll: allow the server to hold the connection for up to 5 minutes.
            mAsyncHttpClient.setTimeout(300000);
            headers.put("X-Atmosphere-Transport", "long-polling");
            if (this.mAtmosphereTrackingId == null) {
                headers.put("X-Atmosphere-tracking-id", "0");
            } else {
                headers.put("X-Atmosphere-tracking-id", this.mAtmosphereTrackingId);
            }
        } else {
            headers.put("X-Atmosphere-tracking-id", "0");
            mAsyncHttpClient.setTimeout(10000);
        }
        mRequestHandle = mAsyncHttpClient.get(pageUrl, headers, new MyHttpClient.ResponseHandler() {
            @Override
            public void onFailure(Call call, int statusCode, Headers headers, byte[] responseBody, Throwable error) {
                if (call.isCanceled()) {
                    Log.i(TAG, "Call canceled on failure - stop updating");
                    return;
                }
                mAtmosphereTrackingId = null;
                if (!longPolling)
                    stopProgressIndicator();
                if (error instanceof SocketTimeoutException) {
                    // A timeout on a long poll is normal; reconnect without long polling.
                    Log.d(TAG, "Connection timeout, reconnecting");
                    showPage(displayPageUrl, false);
                    return;
                } else {
                    /*
                     * If we get a network error try connecting again, if the
                     * fragment is paused, the runnable will be removed
                     */
                    Log.e(TAG, error.toString());
                    Log.e(TAG, String.format("status code = %d", statusCode));
                    Log.e(TAG, "Connection error = " + error.getClass().toString() + ", cycle aborted");
                    showPage(displayPageUrl, false);
                }
            }
            @Override
            public void onSuccess(Call call, int statusCode, Headers headers, byte[] responseBody) {
                if (call.isCanceled()) {
                    Log.i(TAG, "Call canceled on success - stop updating");
                    return;
                }
                // Remember the Atmosphere session id so the next long poll resumes it.
                String id = headers.get("X-Atmosphere-tracking-id");
                if (id != null) {
                    Log.i(TAG, "Found atmosphere tracking id: " + id);
                    OpenHABWidgetListFragment.this.mAtmosphereTrackingId = id;
                }
                if (!longPolling)
                    stopProgressIndicator();
                String responseString = new String(responseBody);
                processContent(responseString, longPolling);
            }
        });
    }
/**
* Parse XML sitemap page and show it
*
*
* @return void
*/
public void processContent(String responseString, boolean longPolling) {
Log.d(TAG, "processContent() " + this.displayPageUrl);
Log.d(TAG, "isAdded = " + isAdded());
Log.d(TAG, "responseString.length() = " + (responseString != null ? responseString.length() : -1));
// We can receive empty response, probably when no items was changed
// so we needn't process it
if (responseString == null || responseString.length() == 0) {
showPage(displayPageUrl, true);
return;
}
// If openHAB verion = 1 get page from XML
if (mActivity.getOpenHABVersion() == 1) {
// As we change the page we need to stop all videos on current page
// before going to the new page. This is quite dirty, but is the only
// way to do that...
openHABWidgetAdapter.stopVideoWidgets();
openHABWidgetAdapter.stopImageRefresh();
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
try {
DocumentBuilder builder = dbf.newDocumentBuilder();
Document document = builder.parse(new InputSource(new StringReader(responseString)));
if (document != null) {
Node rootNode = document.getFirstChild();
openHABWidgetDataSource.setSourceNode(rootNode);
widgetList.clear();
for (OpenHABWidget w : openHABWidgetDataSource.getWidgets()) {
// Remove frame widgets with no label text
if (w.getType().equals("Frame") && TextUtils.isEmpty(w.getLabel()))
continue;
widgetList.add(w);
}
} else {
Log.e(TAG, "Got a null response from openHAB");
showPage(displayPageUrl, false);
}
} catch (ParserConfigurationException | SAXException | IOException e) {
Log.d(TAG, "responseString:\n" + String.valueOf(responseString));
Log.e(TAG, e.getMessage(), e);
}
// Later versions work with JSON
} else {
try {
JSONObject pageJson = new JSONObject(responseString);
// In case of a server timeout in the long polling request, nothing is done
// and the request is restarted
if (longPolling && pageJson.has("timeout")
&& pageJson.getString("timeout").equalsIgnoreCase("true")) {
Log.e(TAG, "Server timeout in the long polling request");
showPage(displayPageUrl, true);
return;
}
// As we change the page we need to stop all videos on current page
// before going to the new page. This is quite dirty, but is the only
// way to do that...
openHABWidgetAdapter.stopVideoWidgets();
openHABWidgetAdapter.stopImageRefresh();
openHABWidgetDataSource.setSourceJson(pageJson);
widgetList.clear();
for (OpenHABWidget w : openHABWidgetDataSource.getWidgets()) {
// Remove frame widgets with no label text
if (w.getType().equals("Frame") && TextUtils.isEmpty(w.getLabel()))
continue;
widgetList.add(w);
}
} catch (JSONException e) {
e.printStackTrace();
}
}
openHABWidgetAdapter.notifyDataSetChanged();
if (!longPolling && isAdded()) {
getListView().clearChoices();
Log.d(TAG, String.format("processContent selectedItem = %d", mCurrentSelectedItem));
if (mCurrentSelectedItem >= 0)
getListView().setItemChecked(mCurrentSelectedItem, true);
}
if (getActivity() != null && mIsVisible)
getActivity().setTitle(openHABWidgetDataSource.getTitle());
// Set widget list index to saved or zero position
// This would mean we got widget and command from nfc tag, so we need to do some automatic actions!
if (this.nfcWidgetId != null && this.nfcCommand != null) {
Log.d(TAG, "Have widget and command, NFC action!");
OpenHABWidget nfcWidget = this.openHABWidgetDataSource.getWidgetById(this.nfcWidgetId);
OpenHABItem nfcItem = nfcWidget.getItem();
// Found widget with id from nfc tag and it has an item
if (nfcWidget != null && nfcItem != null) {
// TODO: Perform nfc widget action here
if (this.nfcCommand.equals("TOGGLE")) {
//RollerShutterItem changed to RollerShutter in later builds of OH2
if (nfcItem.getType().startsWith("Rollershutter")) {
if (nfcItem.getStateAsBoolean())
this.openHABWidgetAdapter.sendItemCommand(nfcItem, "UP");
else
this.openHABWidgetAdapter.sendItemCommand(nfcItem, "DOWN");
} else {
if (nfcItem.getStateAsBoolean())
this.openHABWidgetAdapter.sendItemCommand(nfcItem, "OFF");
else
this.openHABWidgetAdapter.sendItemCommand(nfcItem, "ON");
}
} else {
this.openHABWidgetAdapter.sendItemCommand(nfcItem, this.nfcCommand);
}
}
this.nfcWidgetId = null;
this.nfcCommand = null;
if (this.nfcAutoClose) {
getActivity().finish();
}
}
showPage(displayPageUrl, true);
}
private void stopProgressIndicator() {
if (mActivity != null) {
Log.d(TAG, "Stop progress indicator");
mActivity.setProgressIndicatorVisible(false);
}
}
private void startProgressIndicator() {
if (mActivity != null) {
Log.d(TAG, "Start progress indicator");
mActivity.setProgressIndicatorVisible(true);
}
}
private void showAlertDialog(String alertMessage) {
AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
builder.setMessage(alertMessage)
.setPositiveButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int id) {
}
});
AlertDialog alert = builder.create();
alert.show();
}
    /** Sets the openHAB username for this fragment. */
    public void setOpenHABUsername(String openHABUsername) {
        this.openHABUsername = openHABUsername;
    }
    /** Sets the openHAB password for this fragment. */
    public void setOpenHABPassword(String openHABPassword) {
        this.openHABPassword = openHABPassword;
    }
    /** Sets the sitemap page URL this fragment displays. */
    public void setDisplayPageUrl(String displayPageUrl) {
        this.displayPageUrl = displayPageUrl;
    }
    /** Returns the sitemap page URL this fragment displays. */
    public String getDisplayPageUrl() {
        return displayPageUrl;
    }
public String getTitle() {
Log.d(TAG, "getPageTitle()");
if (openHABWidgetDataSource != null)
return openHABWidgetDataSource.getTitle();
return "";
}
    /**
     * Clears the checked item in this fragment's list. Guards on visibility
     * and attachment so it is safe to call while the fragment is detached.
     */
    public void clearSelection() {
        Log.d(TAG, "clearSelection() " + this.displayPageUrl);
        Log.d(TAG, "isAdded = " + isAdded());
        if (getListView() != null && this.isVisible() && isAdded()) {
            getListView().clearChoices();
            // Force a redraw so the cleared highlight disappears immediately.
            getListView().requestLayout();
        }
    }
    /** Returns this fragment's position in the sitemap pager. */
    public int getPosition() {
        return mPosition;
    }
}
|
package com.haulmont.cuba.core.sys;
import com.haulmont.cuba.core.entity.BaseUuidEntity;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.text.StrTokenizer;
import org.springframework.util.ResourceUtils;
import java.io.File;
/**
* Base class for {@link AppContext} loaders.
*
* @author krivopustov
* @version $Id$
*/
public class AbstractAppContextLoader {

    /** Name of the app property listing Spring context configuration locations. */
    public static final String SPRING_CONTEXT_CONFIG = "cuba.springContextConfig";

    /**
     * Hook invoked after application properties are initialized.
     * Propagates "cuba.allowSetNotLoadedAttributes" to the entity layer.
     */
    protected void afterInitAppProperties() {
        // parseBoolean returns a primitive and handles null gracefully, avoiding
        // the Boolean boxing/unboxing of Boolean.valueOf(...)
        BaseUuidEntity.allowSetNotLoadedAttributes =
                Boolean.parseBoolean(AppContext.getProperty("cuba.allowSetNotLoadedAttributes"));
    }

    /** Hook invoked before the Spring application context is created. */
    protected void beforeInitAppContext() {
    }

    /**
     * Creates and refreshes the Spring application context from the locations
     * listed in the {@link #SPRING_CONTEXT_CONFIG} property and registers it
     * in {@link AppContext}.
     *
     * @throws IllegalStateException if the property is not set
     */
    protected void initAppContext() {
        String configProperty = AppContext.getProperty(SPRING_CONTEXT_CONFIG);
        if (StringUtils.isBlank(configProperty)) {
            throw new IllegalStateException("Missing " + SPRING_CONTEXT_CONFIG + " application property");
        }

        StrTokenizer tokenizer = new StrTokenizer(configProperty);
        String[] locations = tokenizer.getTokenArray();
        replaceLocationsFromConf(locations);

        CubaClassPathXmlApplicationContext appContext = new CubaClassPathXmlApplicationContext();
        appContext.setConfigLocations(locations);
        appContext.setValidating(false);
        appContext.refresh();

        AppContext.setApplicationContext(appContext);
    }

    /**
     * Replaces non-URL locations that also exist in the configuration directory
     * ("cuba.confDir") with file: URLs pointing there, so local overrides win
     * over classpath resources.
     *
     * @param locations context config locations; modified in place
     * @throws IllegalStateException if "cuba.confDir" is not set
     */
    protected void replaceLocationsFromConf(String[] locations) {
        String confDirProp = AppContext.getProperty("cuba.confDir");
        if (confDirProp == null)
            throw new IllegalStateException("cuba.confDir app property is not set");
        File confDir = new File(confDirProp);

        for (int i = 0; i < locations.length; i++) {
            String location = locations[i];
            if (ResourceUtils.isUrl(location))
                continue;
            // Strip the leading slash so the path resolves inside confDir.
            if (location.startsWith("/"))
                location = location.substring(1);
            File file = new File(confDir, location);
            if (file.exists()) {
                locations[i] = file.toURI().toString();
            }
        }
    }

    /** Hook invoked after the application context has been initialized. */
    protected void afterInitAppContext() {
    }
}
|
package de.geeksfactory.opacclient.apis;
import org.apache.http.NameValuePair;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.message.BasicNameValuePair;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.json.JSONException;
import org.json.JSONObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.nodes.TextNode;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import de.geeksfactory.opacclient.apis.OpacApi.MultiStepResult.Status;
import de.geeksfactory.opacclient.i18n.StringProvider;
import de.geeksfactory.opacclient.networking.HttpClientFactory;
import de.geeksfactory.opacclient.networking.NotReachableException;
import de.geeksfactory.opacclient.objects.Account;
import de.geeksfactory.opacclient.objects.AccountData;
import de.geeksfactory.opacclient.objects.Copy;
import de.geeksfactory.opacclient.objects.Detail;
import de.geeksfactory.opacclient.objects.DetailedItem;
import de.geeksfactory.opacclient.objects.Filter;
import de.geeksfactory.opacclient.objects.Filter.Option;
import de.geeksfactory.opacclient.objects.LentItem;
import de.geeksfactory.opacclient.objects.Library;
import de.geeksfactory.opacclient.objects.ReservedItem;
import de.geeksfactory.opacclient.objects.SearchRequestResult;
import de.geeksfactory.opacclient.objects.SearchResult;
import de.geeksfactory.opacclient.objects.SearchResult.MediaType;
import de.geeksfactory.opacclient.searchfields.DropdownSearchField;
import de.geeksfactory.opacclient.searchfields.SearchField;
import de.geeksfactory.opacclient.searchfields.SearchQuery;
import de.geeksfactory.opacclient.searchfields.TextSearchField;
/**
* OpacApi implementation for Web Opacs of the SISIS SunRise product, developed by OCLC.
*
* Restrictions: Bookmarks are only constantly supported if the library uses the BibTip extension.
*/
public class SISIS extends BaseApi implements OpacApi {
    // Fallback mapping from SISIS media-type codes (icon file names, numeric
    // codes and German labels as they appear in hit lists) to MediaType.
    // Individual libraries can override entries via the "mediatypes" config key.
    protected static HashMap<String, MediaType> defaulttypes = new HashMap<>();

    static {
        defaulttypes.put("g", MediaType.EBOOK);
        defaulttypes.put("d", MediaType.CD);
        // German textual labels
        defaulttypes.put("Buch", MediaType.BOOK);
        defaulttypes.put("Bücher", MediaType.BOOK);
        defaulttypes.put("Printmedien", MediaType.BOOK);
        defaulttypes.put("Zeitschrift", MediaType.MAGAZINE);
        defaulttypes.put("Zeitschriften", MediaType.MAGAZINE);
        defaulttypes.put("zeitung", MediaType.NEWSPAPER);
        defaulttypes.put(
                "Einzelband einer Serie, siehe auch übergeordnete Titel",
                MediaType.BOOK);
        // Numeric type codes
        defaulttypes.put("0", MediaType.BOOK);
        defaulttypes.put("1", MediaType.BOOK);
        defaulttypes.put("2", MediaType.BOOK);
        defaulttypes.put("3", MediaType.BOOK);
        defaulttypes.put("4", MediaType.BOOK);
        defaulttypes.put("5", MediaType.BOOK);
        defaulttypes.put("Buch-Kinderbuch", MediaType.BOOK);
        defaulttypes.put("6", MediaType.SCORE_MUSIC);
        defaulttypes.put("7", MediaType.CD_MUSIC);
        defaulttypes.put("8", MediaType.CD_MUSIC);
        defaulttypes.put("Tonträger", MediaType.CD_MUSIC);
        defaulttypes.put("12", MediaType.CD);
        defaulttypes.put("13", MediaType.CD);
        defaulttypes.put("CD", MediaType.CD);
        defaulttypes.put("DVD", MediaType.DVD);
        defaulttypes.put("14", MediaType.CD);
        defaulttypes.put("15", MediaType.DVD);
        defaulttypes.put("16", MediaType.CD);
        defaulttypes.put("audiocd", MediaType.CD);
        defaulttypes.put("Film", MediaType.MOVIE);
        defaulttypes.put("Filme", MediaType.MOVIE);
        defaulttypes.put("17", MediaType.MOVIE);
        defaulttypes.put("18", MediaType.MOVIE);
        defaulttypes.put("19", MediaType.MOVIE);
        defaulttypes.put("20", MediaType.DVD);
        defaulttypes.put("dvd", MediaType.DVD);
        defaulttypes.put("21", MediaType.SCORE_MUSIC);
        defaulttypes.put("Noten", MediaType.SCORE_MUSIC);
        defaulttypes.put("22", MediaType.BOARDGAME);
        defaulttypes.put("26", MediaType.CD);
        defaulttypes.put("27", MediaType.CD);
        defaulttypes.put("28", MediaType.EBOOK);
        defaulttypes.put("31", MediaType.BOARDGAME);
        defaulttypes.put("35", MediaType.MOVIE);
        defaulttypes.put("36", MediaType.DVD);
        defaulttypes.put("37", MediaType.CD);
        defaulttypes.put("29", MediaType.AUDIOBOOK);
        defaulttypes.put("41", MediaType.GAME_CONSOLE);
        defaulttypes.put("42", MediaType.GAME_CONSOLE);
        defaulttypes.put("46", MediaType.GAME_CONSOLE_NINTENDO);
        defaulttypes.put("52", MediaType.EBOOK);
        defaulttypes.put("56", MediaType.EBOOK);
        defaulttypes.put("96", MediaType.EBOOK);
        defaulttypes.put("97", MediaType.EBOOK);
        defaulttypes.put("99", MediaType.EBOOK);
        defaulttypes.put("EB", MediaType.EBOOK);
        defaulttypes.put("ebook", MediaType.EBOOK);
        // Icon file-name based codes
        defaulttypes.put("buch01", MediaType.BOOK);
        defaulttypes.put("buch02", MediaType.PACKAGE_BOOKS);
        defaulttypes.put("Medienpaket", MediaType.PACKAGE);
        defaulttypes.put("datenbank", MediaType.PACKAGE);
        defaulttypes
                .put("Medienpaket, Lernkiste, Lesekiste", MediaType.PACKAGE);
        defaulttypes.put("buch03", MediaType.BOOK);
        defaulttypes.put("buch04", MediaType.PACKAGE_BOOKS);
        defaulttypes.put("buch05", MediaType.PACKAGE_BOOKS);
        defaulttypes.put("Web-Link", MediaType.URL);
        defaulttypes.put("ejournal", MediaType.EDOC);
        defaulttypes.put("karte", MediaType.MAP);
    }
    // Maximum age of a server session before start() should be re-run (3 minutes).
    protected final long SESSION_LIFETIME = 1000 * 60 * 3;
    // OPAC base URL, read from the library configuration key "baseurl" in init().
    protected String opac_url = "";
    // Raw per-library configuration (JSON) supplied through init().
    protected JSONObject data;
    // Hidden "CSId" session token scraped from the start page; sent with searches.
    protected String CSId;
    // Identifier of the current hit list, used by searchGetPage() for paging.
    protected String identifier;
    // Results per page, updated from the last parsed hit list.
    protected int resultcount = 10;
    // Login bookkeeping — presumably a timestamp and the logged-in account;
    // NOTE(review): not used in this part of the file, verify against account code.
    protected long logged_in;
    protected Account logged_in_as;
    protected static final String ENCODING = "UTF-8";
public List<SearchField> parseSearchFields() throws IOException,
JSONException {
if (!initialised) {
start();
}
String html = httpGet(opac_url
+ "/search.do?methodToCall=switchSearchPage&SearchType=2",
ENCODING);
Document doc = Jsoup.parse(html);
List<SearchField> fields = new ArrayList<>();
Elements options = doc
.select("select[name=searchCategories[0]] option");
for (Element option : options) {
TextSearchField field = new TextSearchField();
field.setDisplayName(option.text());
field.setId(option.attr("value"));
field.setHint("");
fields.add(field);
}
for (Element dropdown : doc.select("#tab-content select")) {
parseDropdown(dropdown, fields);
}
return fields;
}
private void parseDropdown(Element dropdownElement,
List<SearchField> fields) throws JSONException {
Elements options = dropdownElement.select("option");
DropdownSearchField dropdown = new DropdownSearchField();
if (dropdownElement.parent().select("input[type=hidden]").size() > 0) {
dropdown.setId(dropdownElement.parent()
.select("input[type=hidden]").attr("value"));
dropdown.setData(new JSONObject("{\"restriction\": true}"));
} else {
dropdown.setId(dropdownElement.attr("name"));
dropdown.setData(new JSONObject("{\"restriction\": false}"));
}
for (Element option : options) {
dropdown.addDropdownValue(option.attr("value"), option.text());
}
dropdown.setDisplayName(dropdownElement.parent().select("label").text());
fields.add(dropdown);
}
    /**
     * Opens the OPAC start page to establish a server session and scrapes the
     * hidden CSId token required by subsequent search requests.
     */
    @Override
    public void start() throws
            IOException {
        // Some libraries require start parameters for start.do, like Login=foo
        String startparams = "";
        if (data.has("startparams")) {
            try {
                startparams = "?" + data.getString("startparams");
            } catch (JSONException e) {
                e.printStackTrace();
            }
        }
        String html = httpGet(opac_url + "/start.do" + startparams, ENCODING);

        initialised = true;

        Document doc = Jsoup.parse(html);
        // The server expects this hidden session token in search requests.
        CSId = doc.select("input[name=CSId]").val();

        super.start();
    }
@Override
public void init(Library lib, HttpClientFactory httpClientFactory) {
super.init(lib, httpClientFactory);
this.data = lib.getData();
try {
this.opac_url = data.getString("baseurl");
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
    /**
     * Submits a search to the OPAC. Free-text criteria become numbered
     * searchCategories/searchString pairs combined with AND; dropdown criteria
     * flagged as "restriction" are sent as searchRestriction parameters
     * instead.
     *
     * @throws OpacErrorException if no criteria were given or more than 4
     *                            free-text criteria were used
     */
    @Override
    public SearchRequestResult search(List<SearchQuery> query)
            throws IOException, OpacErrorException,
            JSONException {
        List<NameValuePair> params = new ArrayList<>();

        int index = 0;
        int restrictionIndex = 0;
        start();

        params.add(new BasicNameValuePair("methodToCall", "submit"));
        params.add(new BasicNameValuePair("CSId", CSId));
        params.add(new BasicNameValuePair("methodToCallParameter",
                "submitSearch"));

        for (SearchQuery entry : query) {
            if (entry.getValue().equals("")) {
                continue;
            }
            if (entry.getSearchField() instanceof DropdownSearchField) {
                JSONObject data = entry.getSearchField().getData();
                if (data.optBoolean("restriction", false)) {
                    params.add(new BasicNameValuePair("searchRestrictionID["
                            + restrictionIndex + "]", entry.getSearchField()
                            .getId()));
                    params.add(new BasicNameValuePair(
                            "searchRestrictionValue1[" + restrictionIndex + "]",
                            entry.getValue()));
                    restrictionIndex++;
                } else {
                    params.add(new BasicNameValuePair(entry.getKey(), entry
                            .getValue()));
                }
            } else {
                // All free-text criteria after the first are AND-combined.
                if (index != 0) {
                    params.add(new BasicNameValuePair("combinationOperator["
                            + index + "]", "AND"));
                }
                params.add(new BasicNameValuePair("searchCategories[" + index
                        + "]", entry.getKey()));
                params.add(new BasicNameValuePair(
                        "searchString[" + index + "]", entry.getValue()));
                index++;
            }
        }
        if (index == 0) {
            throw new OpacErrorException(
                    stringProvider.getString(StringProvider.NO_CRITERIA_INPUT));
        }
        // The SunRise search form supports at most 4 free-text criteria.
        if (index > 4) {
            throw new OpacErrorException(stringProvider.getQuantityString(
                    StringProvider.LIMITED_NUM_OF_CRITERIA, 4, 4));
        }

        params.add(new BasicNameValuePair("submitSearch", "Suchen"));
        params.add(new BasicNameValuePair("callingPage", "searchParameters"));
        params.add(new BasicNameValuePair("numberOfHits", "10"));

        String html = httpGet(
                opac_url + "/search.do?"
                        + URLEncodedUtils.format(params, "UTF-8"), ENCODING);
        return parse_search_wrapped(html, 1);
    }
public SearchRequestResult volumeSearch(Map<String, String> query)
throws IOException, OpacErrorException {
List<NameValuePair> params = new ArrayList<>();
params.add(new BasicNameValuePair("methodToCall", "volumeSearch"));
params.add(new BasicNameValuePair("dbIdentifier", query
.get("dbIdentifier")));
params.add(new BasicNameValuePair("catKey", query.get("catKey")));
params.add(new BasicNameValuePair("periodical", "N"));
String html = httpGet(
opac_url + "/search.do?"
+ URLEncodedUtils.format(params, "UTF-8"), ENCODING);
return parse_search_wrapped(html, 1);
}
@Override
public SearchRequestResult searchGetPage(int page) throws IOException,
OpacErrorException {
if (!initialised) {
start();
}
String html = httpGet(opac_url
+ "/hitList.do?methodToCall=pos&identifier=" + identifier
+ "&curPos=" + (((page - 1) * resultcount) + 1), ENCODING);
return parse_search_wrapped(html, page);
}
public class SingleResultFound extends Exception {
}
    /**
     * Runs parse_search and, when the OPAC jumped straight to a single-hit
     * detail view, navigates back to the primary hit list and parses that
     * instead.
     *
     * @throws NotReachableException if even the recovered page is a single hit
     */
    protected SearchRequestResult parse_search_wrapped(String html, int page)
            throws IOException, OpacErrorException {
        try {
            return parse_search(html, page);
        } catch (SingleResultFound e) {
            // A "1/1" page is a detail view; ask the server for the list form.
            html = httpGet(opac_url + "/hitList.do?methodToCall=backToPrimaryHitList", ENCODING);
            try {
                return parse_search(html, page);
            } catch (SingleResultFound e1) {
                throw new NotReachableException();
            }
        }
    }
public SearchRequestResult parse_search(String html, int page)
throws OpacErrorException, SingleResultFound {
Document doc = Jsoup.parse(html);
doc.setBaseUri(opac_url + "/searchfoo");
if (doc.select(".error").size() > 0) {
throw new OpacErrorException(doc.select(".error").text().trim());
} else if (doc.select(".nohits").size() > 0) {
throw new OpacErrorException(doc.select(".nohits").text().trim());
} else if (doc.select(".box-header h2, #nohits").text()
.contains("keine Treffer")) {
return new SearchRequestResult(new ArrayList<SearchResult>(), 0, 1,
1);
}
int results_total = -1;
String resultnumstr = doc.select(".box-header h2").first().text();
if (resultnumstr.contains("(1/1)") || resultnumstr.contains(" 1/1")) {
throw new SingleResultFound();
} else if (resultnumstr.contains("(")) {
results_total = Integer.parseInt(resultnumstr.replaceAll(
".*\\(([0-9]+)\\).*", "$1"));
} else if (resultnumstr.contains(": ")) {
results_total = Integer.parseInt(resultnumstr.replaceAll(
".*: ([0-9]+)$", "$1"));
}
Elements table = doc.select("table.data tbody tr");
identifier = null;
Elements links = doc.select("table.data a");
boolean haslink = false;
for (int i = 0; i < links.size(); i++) {
Element node = links.get(i);
if (node.hasAttr("href")
& node.attr("href").contains("singleHit.do") && !haslink) {
haslink = true;
try {
List<NameValuePair> anyurl = URLEncodedUtils.parse(
new URI(node.attr("href").replace(" ", "%20")
.replace("&", "&")), ENCODING);
for (NameValuePair nv : anyurl) {
if (nv.getName().equals("identifier")) {
identifier = nv.getValue();
break;
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
List<SearchResult> results = new ArrayList<>();
for (int i = 0; i < table.size(); i++) {
Element tr = table.get(i);
SearchResult sr = new SearchResult();
if (tr.select("td img[title]").size() > 0) {
String title = tr.select("td img").get(0).attr("title");
String[] fparts = tr.select("td img").get(0).attr("src")
.split("/");
String fname = fparts[fparts.length - 1];
MediaType default_by_fname = defaulttypes.get(fname
.toLowerCase(Locale.GERMAN).replace(".jpg", "")
.replace(".gif", "").replace(".png", ""));
MediaType default_by_title = defaulttypes.get(title);
MediaType default_name = default_by_title != null ? default_by_title
: default_by_fname;
if (data.has("mediatypes")) {
try {
sr.setType(MediaType.valueOf(data.getJSONObject(
"mediatypes").getString(fname)));
} catch (JSONException | IllegalArgumentException e) {
sr.setType(default_name);
}
} else {
sr.setType(default_name);
}
}
String alltext = tr.text();
if (alltext.contains("eAudio") || alltext.contains("eMusic")) {
sr.setType(MediaType.MP3);
} else if (alltext.contains("eVideo")) {
sr.setType(MediaType.EVIDEO);
} else if (alltext.contains("eBook")) {
sr.setType(MediaType.EBOOK);
} else if (alltext.contains("Munzinger")) {
sr.setType(MediaType.EDOC);
}
if (tr.children().size() > 3
&& tr.child(3).select("img[title*=cover]").size() == 1) {
sr.setCover(tr.child(3).select("img[title*=cover]")
.attr("abs:src"));
if (sr.getCover().contains("showCover.do")) {
downloadCover(sr);
}
}
Element middlething;
if (tr.children().size() > 2 && tr.child(2).select("a").size() > 0) {
middlething = tr.child(2);
} else {
middlething = tr.child(1);
}
List<Node> children = middlething.childNodes();
if (middlething.select("div")
.not("#hlrightblock,.bestellfunktionen").size() == 1) {
Element indiv = middlething.select("div")
.not("#hlrightblock,.bestellfunktionen").first();
if (indiv.select("a").size() > 0 && indiv.children().size() > 1) {
children = indiv.childNodes();
}
} else if (middlething.select("span.titleData").size() == 1) {
children = middlething.select("span.titleData").first()
.childNodes();
}
int childrennum = children.size();
List<String[]> strings = new ArrayList<>();
for (int ch = 0; ch < childrennum; ch++) {
Node node = children.get(ch);
if (node instanceof TextNode) {
String text = ((TextNode) node).text().trim();
if (text.length() > 3) {
strings.add(new String[]{"text", "", text});
}
} else if (node instanceof Element) {
List<Node> subchildren = node.childNodes();
for (int j = 0; j < subchildren.size(); j++) {
Node subnode = subchildren.get(j);
if (subnode instanceof TextNode) {
String text = ((TextNode) subnode).text().trim();
if (text.length() > 3) {
strings.add(new String[]{
((Element) node).tag().getName(),
"text", text,
((Element) node).className(),
node.attr("style")});
}
} else if (subnode instanceof Element) {
String text = ((Element) subnode).text().trim();
if (text.length() > 3) {
strings.add(new String[]{
((Element) node).tag().getName(),
((Element) subnode).tag().getName(),
text, ((Element) node).className(),
node.attr("style")});
}
}
}
}
}
StringBuilder description = null;
if (tr.select("span.Z3988").size() == 1) {
// Sometimes there is a <span class="Z3988"> item which provides
// data in a standardized format.
List<NameValuePair> z3988data;
boolean hastitle = false;
try {
description = new StringBuilder();
z3988data = URLEncodedUtils.parse(new URI("http://dummy/?"
+ tr.select("span.Z3988").attr("title")), "UTF-8");
for (NameValuePair nv : z3988data) {
if (nv.getValue() != null) {
if (!nv.getValue().trim().equals("")) {
if (nv.getName().equals("rft.btitle")
&& !hastitle) {
description.append("<b>").append(nv.getValue()).append("</b>");
hastitle = true;
} else if (nv.getName().equals("rft.atitle")
&& !hastitle) {
description.append("<b>").append(nv.getValue()).append("</b>");
hastitle = true;
} else if (nv.getName().equals("rft.au")) {
description.append("<br />").append(nv.getValue());
} else if (nv.getName().equals("rft.date")) {
description.append("<br />").append(nv.getValue());
}
}
}
}
} catch (URISyntaxException e) {
description = null;
}
}
boolean described = false;
if (description != null && description.length() > 0) {
sr.setInnerhtml(description.toString());
described = true;
} else {
description = new StringBuilder();
}
int k = 0;
boolean yearfound = false;
boolean titlefound = false;
boolean sigfound = false;
for (String[] part : strings) {
if (!described) {
if (part[0].equals("a") && (k == 0 || !titlefound)) {
if (k != 0) {
description.append("<br />");
}
description.append("<b>").append(part[2]).append("</b>");
titlefound = true;
} else if (part[2].matches("\\D*[0-9]{4}\\D*")
&& part[2].length() <= 10) {
yearfound = true;
if (k != 0) {
description.append("<br />");
}
description.append(part[2]);
} else if (k == 1 && !yearfound
&& part[2].matches("^\\s*\\([0-9]{4}\\)$")) {
if (k != 0) {
description.append("<br />");
}
description.append(part[2]);
} else if (k == 1 && !yearfound
&& part[2].matches("^\\s*\\([0-9]{4}\\)$")) {
if (k != 0) {
description.append("<br />");
}
description.append(part[2]);
} else if (k == 1 && !yearfound) {
description.append("<br />");
description.append(part[2]);
} else if (k > 1 && k < 4 && !sigfound
&& part[0].equals("text")
&& part[2].matches("^[A-Za-z0-9,\\- ]+$")) {
description.append("<br />");
description.append(part[2]);
}
}
if (part.length == 4) {
if (part[0].equals("span") && part[3].equals("textgruen")) {
sr.setStatus(SearchResult.Status.GREEN);
} else if (part[0].equals("span")
&& part[3].equals("textrot")) {
sr.setStatus(SearchResult.Status.RED);
}
} else if (part.length == 5) {
if (part[4].contains("purple")) {
sr.setStatus(SearchResult.Status.YELLOW);
}
}
if (sr.getStatus() == null) {
if ((part[2].contains("entliehen") && part[2]
.startsWith("Vormerkung ist leider nicht möglich"))
|| part[2]
.contains(
"nur in anderer Zweigstelle ausleihbar und nicht bestellbar")) {
sr.setStatus(SearchResult.Status.RED);
} else if (part[2].startsWith("entliehen")
|| part[2]
.contains("Ein Exemplar finden Sie in einer anderen Zweigstelle")) {
sr.setStatus(SearchResult.Status.YELLOW);
} else if ((part[2].startsWith("bestellbar") && !part[2]
.contains("nicht bestellbar"))
|| (part[2].startsWith("vorbestellbar") && !part[2]
.contains("nicht vorbestellbar"))
|| (part[2].startsWith("vorbestellbar") && !part[2]
.contains("nicht vorbestellbar"))
|| (part[2].startsWith("vormerkbar") && !part[2]
.contains("nicht vormerkbar"))
|| (part[2].contains("heute zurückgebucht"))
|| (part[2].contains("ausleihbar") && !part[2]
.contains("nicht ausleihbar"))) {
sr.setStatus(SearchResult.Status.GREEN);
}
if (sr.getType() != null) {
if (sr.getType().equals(MediaType.EBOOK)
|| sr.getType().equals(MediaType.EVIDEO)
|| sr.getType().equals(MediaType.MP3))
// Especially Onleihe.de ebooks are often marked
// green though they are not available.
{
sr.setStatus(SearchResult.Status.UNKNOWN);
}
}
}
k++;
}
if (!described) {
sr.setInnerhtml(description.toString());
}
sr.setNr(10 * (page - 1) + i);
sr.setId(null);
results.add(sr);
}
resultcount = results.size();
return new SearchRequestResult(results, results_total, page);
}
@Override
public DetailedItem getResultById(String id, String homebranch)
        throws IOException {
    // Build the start.do URL piece by piece. Some libraries require extra
    // start parameters (e.g. Login=foo) configured in the library data.
    StringBuilder url = new StringBuilder(opac_url).append("/start.do?");
    if (data.has("startparams")) {
        try {
            url.append(data.getString("startparams")).append("&");
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
    // Query field 0 is the catalogue key; the id is matched exactly.
    url.append("searchType=1&Query=0%3D%22").append(id).append("%22");
    if (homebranch != null) {
        // Restrict the availability display to the user's home branch.
        url.append("&selectedViewBranchlib=").append(homebranch);
    }
    return loadDetail(httpGet(url.toString(), ENCODING));
}
@Override
public DetailedItem getResult(int nr) throws IOException {
    // The OPAC numbers hits starting at 1, while we are given a 0-based index.
    String url = opac_url
            + "/singleHit.do?tab=showExemplarActive&methodToCall=showHit&curPos="
            + (nr + 1) + "&identifier=" + identifier;
    return loadDetail(httpGet(url, ENCODING));
}
/**
 * Loads a detail page. The detail view is spread over several tabs, so the
 * title tab and the availability tab are fetched in addition to the page
 * that was already retrieved by the caller.
 *
 * @param html HTML of the detail page already fetched by the caller
 * @return the parsed item
 * @throws IOException on communication errors
 */
protected DetailedItem loadDetail(String html) throws IOException {
    String html2 = httpGet(opac_url
            + "/singleHit.do?methodToCall=activateTab&tab=showTitleActive",
            ENCODING);
    String html3 = httpGet(
            opac_url
                    + "/singleHit.do?methodToCall=activateTab&tab=showAvailabilityActive",
            ENCODING);
    String coverJs = null;
    // Some installations load the cover image asynchronously via a jQuery
    // AJAX call; detect that call and fetch the referenced JSP snippet.
    Pattern coverPattern = Pattern.compile("\\$\\.ajax\\(\\{[\\n\\s]*url: '(jsp/result/cover" +
            ".jsp\\?[^']+')");
    Matcher coverMatcher = coverPattern.matcher(html);
    if (coverMatcher.find()) {
        coverJs = httpGet(opac_url + "/" + coverMatcher.group(1), ENCODING);
    }
    DetailedItem result = parseDetail(html, html2, html3, coverJs, data, stringProvider);
    // Download the cover unless none was found or it is hosted by Amazon
    // (those are loaded directly by the client). The previous code relied on
    // an NPE swallowed by an empty catch block when no cover was set; check
    // for null explicitly instead of using an exception for control flow.
    if (result.getCover() != null && !result.getCover().contains("amazon")) {
        try {
            downloadCover(result);
        } catch (Exception e) {
            // Best effort: a failed cover download must not break the detail view.
            e.printStackTrace();
        }
    }
    return result;
}
/**
 * Parses the three tabs of a detail page into a {@code DetailedItem}:
 * {@code html} (exemplar tab, used for copies), {@code html2} (title tab,
 * used for bibliographic details) and {@code html3} (availability tab, used
 * for reservation links and downloads).
 *
 * @param html           HTML of the exemplar ("showExemplarActive") tab
 * @param html2          HTML of the title tab
 * @param html3          HTML of the availability tab
 * @param coverJs        JS/HTML snippet of the async cover loader, or null
 * @param data           per-library configuration (reads "baseurl")
 * @param stringProvider used for localized error strings
 * @return the parsed item
 * @throws IOException declared, but the body itself does not perform I/O
 */
static DetailedItem parseDetail(String html, String html2, String html3, String coverJs,
        JSONObject data,
        StringProvider stringProvider)
        throws IOException {
    Document doc = Jsoup.parse(html);
    String opac_url = data.optString("baseurl", "");
    doc.setBaseUri(opac_url);
    Document doc2 = Jsoup.parse(html2);
    doc2.setBaseUri(opac_url);
    Document doc3 = Jsoup.parse(html3);
    doc3.setBaseUri(opac_url);
    DetailedItem result = new DetailedItem();
    try {
        // Preferred ID source: the BibTip recommendation widget's hidden id.
        result.setId(doc.select("#bibtip_id").text().trim());
    } catch (Exception ex) {
        ex.printStackTrace();
    }
    // Collect reservation ("Vormerkung") / order ("Bestellung") links from
    // the availability tab; also fall back to them for ID retrieval.
    List<String> reservationlinks = new ArrayList<>();
    for (Element link : doc3.select("#vormerkung a, #tab-content a")) {
        String href = link.absUrl("href");
        Map<String, String> hrefq = getQueryParamsFirst(href);
        if (result.getId() == null) {
            // ID retrieval
            String key = hrefq.get("katkey");
            if (key != null) {
                result.setId(key);
                break;
            }
        }
        // Vormerken
        if (hrefq.get("methodToCall") != null) {
            if (hrefq.get("methodToCall").equals("doVormerkung")
                    || hrefq.get("methodToCall").equals("doBestellung")) {
                // Keep only the query string; it is replayed later by
                // reservation().
                reservationlinks.add(href.split("\\?")[1]);
            }
        }
    }
    if (reservationlinks.size() == 1) {
        result.setReservable(true);
        result.setReservation_info(reservationlinks.get(0));
    } else if (reservationlinks.size() == 0) {
        result.setReservable(false);
    } else {
        // TODO: Multiple options - handle this case!
    }
    if (coverJs != null) {
        // Cover was loaded asynchronously: extract the img src from the
        // fetched JS/HTML snippet.
        Pattern srcPattern = Pattern.compile("<img .* src=\"([^\"]+)\">");
        Matcher matcher = srcPattern.matcher(coverJs);
        if (matcher.find()) {
            result.setCover(matcher.group(1));
        }
    } else if (doc.select(".data td img").size() == 1) {
        // Otherwise use the single inline cover image, if there is one.
        result.setCover(doc.select(".data td img").first().attr("abs:src"));
    }
    // Title: prefer the teaser title, then the first bold cell, else empty.
    if (doc.select(".aw_teaser_title").size() == 1) {
        result.setTitle(doc.select(".aw_teaser_title").first().text()
                .trim());
    } else if (doc.select(".data td strong").size() > 0) {
        result.setTitle(doc.select(".data td strong").first().text().trim());
    } else {
        result.setTitle("");
    }
    if (doc.select(".aw_teaser_title_zusatz").size() > 0) {
        result.addDetail(new Detail("Titelzusatz", doc
                .select(".aw_teaser_title_zusatz").text().trim()));
    }
    // Walk the title tab's detail cell: <strong class="c2"> nodes are field
    // labels, everything until the next label is that field's value.
    String title = "";
    String text = "";
    boolean takeover = false;
    Element detailtrs = doc2.select(".box-container .data td").first();
    for (Node node : detailtrs.childNodes()) {
        if (node instanceof Element) {
            Element element = (Element) node;
            if (element.tagName().equals("strong")) {
                if (element.hasClass("c2")) {
                    // New label starts: flush the previous label/value pair.
                    if (!title.equals("")) {
                        result.addDetail(new Detail(title, text.trim()));
                    }
                    title = element.text().trim();
                    text = "";
                } else {
                    text = text + element.text();
                }
            } else {
                if (element.tagName().equals("a")) {
                    if (element.text().trim().contains("hier klicken") ||
                            title.contains("Link")) {
                        // "click here" style link: keep the URL itself and
                        // carry the pair over into the second pass below.
                        text = text + node.attr("href");
                        takeover = true;
                        break;
                    } else {
                        text = text + element.text();
                    }
                }
            }
        } else if (node instanceof TextNode) {
            text = text + ((TextNode) node).text();
        }
    }
    if (!takeover) {
        text = "";
        title = "";
    }
    // Second pass: the alternative markup variant with plain <strong>
    // labels inside #tab-content.
    detailtrs = doc2.select("#tab-content .data td").first();
    if (detailtrs != null) {
        for (Node node : detailtrs.childNodes()) {
            if (node instanceof Element) {
                if (((Element) node).tagName().equals("strong")) {
                    if (!text.equals("") && !title.equals("")) {
                        result.addDetail(new Detail(title.trim(), text.trim()));
                        if (title.equals("Titel:")) {
                            // The "Titel:" field overrides the heuristic
                            // title found earlier.
                            result.setTitle(text.trim());
                        }
                        text = "";
                    }
                    title = ((Element) node).text().trim();
                } else {
                    if (((Element) node).tagName().equals("a")
                            && (((Element) node).text().trim()
                            .contains("hier klicken") || title
                            .equals("Link:"))) {
                        text = text + node.attr("href");
                    } else {
                        text = text + ((Element) node).text();
                    }
                }
            } else if (node instanceof TextNode) {
                text = text + ((TextNode) node).text();
            }
        }
    } else {
        // Third markup variant: a plain label/value table (.fulltitle).
        if (doc2.select("#tab-content .fulltitle tr").size() > 0) {
            Elements rows = doc2.select("#tab-content .fulltitle tr");
            for (Element tr : rows) {
                if (tr.children().size() == 2) {
                    Element valcell = tr.child(1);
                    String value = valcell.text().trim();
                    if (valcell.select("a").size() == 1) {
                        // Single link in the value cell: store the URL.
                        value = valcell.select("a").first().absUrl("href");
                    }
                    result.addDetail(new Detail(tr.child(0).text().trim(),
                            value));
                }
            }
        } else {
            // None of the known layouts matched: surface a localized error
            // as a pseudo-detail instead of failing.
            result.addDetail(new Detail(stringProvider
                    .getString(StringProvider.ERROR), stringProvider
                    .getString(StringProvider.COULD_NOT_LOAD_DETAIL)));
        }
    }
    // Flush the last dangling label/value pair from the loop above.
    if (!text.equals("") && !title.equals("")) {
        result.addDetail(new Detail(title.trim(), text.trim()));
        if (title.equals("Titel:")) {
            result.setTitle(text.trim());
        }
    }
    // Last-resort ID retrieval from availability-tab links ("katkey").
    for (Element link : doc3.select("#tab-content a")) {
        Map<String, String> hrefq = getQueryParamsFirst(link.absUrl("href"));
        if (result.getId() == null) {
            // ID retrieval
            String key = hrefq.get("katkey");
            if (key != null) {
                result.setId(key);
                break;
            }
        }
    }
    // E-media download links are offered as a detail row.
    for (Element link : doc3.select(".box-container a")) {
        if (link.text().trim().equals("Download")) {
            result.addDetail(new Detail(stringProvider
                    .getString(StringProvider.DOWNLOAD), link
                    .absUrl("href")));
        }
    }
    // Map copy-table column headers to indices; the defaults below apply
    // when the header row (#bg2) is missing or incomplete.
    Map<String, Integer> copy_columnmap = new HashMap<>();
    // Default values
    copy_columnmap.put("barcode", 1);
    copy_columnmap.put("branch", 3);
    copy_columnmap.put("status", 4);
    Elements copy_columns = doc.select("#tab-content .data tr#bg2 th");
    for (int i = 0; i < copy_columns.size(); i++) {
        Element th = copy_columns.get(i);
        String head = th.text().trim();
        if (head.contains("Status")) {
            copy_columnmap.put("status", i);
        }
        if (head.contains("Zweigstelle")) {
            copy_columnmap.put("branch", i);
        }
        if (head.contains("Mediennummer")) {
            copy_columnmap.put("barcode", i);
        }
        if (head.contains("Standort")) {
            copy_columnmap.put("location", i);
        }
        if (head.contains("Signatur")) {
            copy_columnmap.put("signature", i);
        }
    }
    // "entliehen bis <date> (gesamte Vormerkungen: <n>)" — lent status with
    // due date and reservation count.
    Pattern status_lent = Pattern
            .compile(
                    "^(entliehen) bis ([0-9]{1,2}.[0-9]{1,2}.[0-9]{2," +
                            "4}) \\(gesamte Vormerkungen: ([0-9]+)\\)$");
    Pattern status_and_barcode = Pattern.compile("^(.*) ([0-9A-Za-z]+)$");
    Elements exemplartrs = doc.select("#tab-content .data tr").not("#bg2");
    DateTimeFormatter fmt =
            DateTimeFormat.forPattern("dd.MM.yyyy").withLocale(Locale.GERMAN);
    // One Copy per table row; parsing failures skip only the affected row.
    for (Element tr : exemplartrs) {
        try {
            Copy copy = new Copy();
            Element status = tr.child(copy_columnmap.get("status"));
            Element barcode = tr.child(copy_columnmap.get("barcode"));
            String barcodetext = barcode.text().trim()
                    .replace(" Wegweiser", "");
            // STATUS
            String statustext;
            if (status.getElementsByTag("b").size() > 0) {
                statustext = status.getElementsByTag("b").text().trim();
            } else {
                statustext = status.text().trim();
            }
            if (copy_columnmap.get("status").equals(copy_columnmap
                    .get("barcode"))) {
                // Status and barcode share one column: split "<status> <barcode>".
                Matcher matcher1 = status_and_barcode.matcher(statustext);
                if (matcher1.matches()) {
                    statustext = matcher1.group(1);
                    barcodetext = matcher1.group(2);
                }
            }
            Matcher matcher = status_lent.matcher(statustext);
            if (matcher.matches()) {
                copy.setStatus(matcher.group(1));
                copy.setReservations(matcher.group(3));
                copy.setReturnDate(fmt.parseLocalDate(matcher.group(2)));
            } else {
                copy.setStatus(statustext.trim().replace(" Wegweiser", ""));
            }
            copy.setBarcode(barcodetext);
            if (status.select("a[href*=doVormerkung]").size() == 1) {
                // Per-copy reservation link: keep its query string.
                copy.setResInfo(status.select("a[href*=doVormerkung]").attr("href")
                        .split("\\?")[1]);
            }
            String branchtext = tr
                    .child(copy_columnmap.get("branch")).text()
                    .trim().replace(" Wegweiser", "");
            copy.setBranch(branchtext);
            if (copy_columnmap.containsKey("location")) {
                copy.setLocation(tr.child(copy_columnmap.get("location"))
                        .text().trim().replace(" Wegweiser", ""));
            }
            if (copy_columnmap
                    .containsKey("signature")) {
                copy.setShelfmark(
                        tr.child(copy_columnmap.get("signature"))
                                .text().trim().replace(" Wegweiser", ""));
            }
            result.addCopy(copy);
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    // Detect a "volume search" link (methodToCall=volumeSearch) and record
    // its catKey/dbIdentifier so child volumes can be listed.
    try {
        Element isvolume = null;
        Map<String, String> volume = new HashMap<>();
        Elements links = doc.select(".data td a");
        int elcount = links.size();
        for (int eli = 0; eli < elcount; eli++) {
            List<NameValuePair> anyurl = URLEncodedUtils.parse(new URI(
                    links.get(eli).attr("href")), "UTF-8");
            for (NameValuePair nv : anyurl) {
                if (nv.getName().equals("methodToCall")
                        && nv.getValue().equals("volumeSearch")) {
                    isvolume = links.get(eli);
                } else if (nv.getName().equals("catKey")) {
                    volume.put("catKey", nv.getValue());
                } else if (nv.getName().equals("dbIdentifier")) {
                    volume.put("dbIdentifier", nv.getValue());
                }
            }
            if (isvolume != null) {
                volume.put("volume", "true");
                result.setVolumesearch(volume);
                break;
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return result;
}
/**
 * Multi-step reservation flow. Depending on {@code useraction} this method
 * is entered in one of three states: initial call (fetch availability page,
 * log in if necessary, possibly ask for a branch), branch selected
 * (ACTION_BRANCH), or confirmed (ACTION_CONFIRMATION). The resulting page
 * is then inspected for errors, a confirmation dialog, or success.
 *
 * @param item       item to reserve; its reservation_info is the query
 *                   string captured by parseDetail()
 * @param acc        account to reserve with
 * @param useraction which step of the flow we are in
 * @param selection  branch key chosen by the user, if any
 */
@Override
public ReservationResult reservation(DetailedItem item, Account acc,
        int useraction, String selection) throws IOException {
    String reservation_info = item.getReservation_info();
    final String branch_inputfield = "issuepoint";
    Document doc = null;
    // "doBestellung" links place an order instead of a reservation.
    String action = "reservation";
    if (reservation_info.contains("doBestellung")) {
        action = "order";
    }
    if (useraction == MultiStepResult.ACTION_CONFIRMATION) {
        // Final step: the user confirmed the dialog, submit the action.
        List<NameValuePair> nameValuePairs = new ArrayList<>(2);
        nameValuePairs.add(new BasicNameValuePair("methodToCall", action));
        nameValuePairs.add(new BasicNameValuePair("CSId", CSId));
        String html = httpPost(opac_url + "/" + action + ".do",
                new UrlEncodedFormEntity(nameValuePairs), ENCODING);
        doc = Jsoup.parse(html);
    } else if (selection == null || useraction == 0) {
        // Initial step: open the availability page for this item.
        String html = httpGet(opac_url + "/availability.do?"
                + reservation_info, ENCODING);
        doc = Jsoup.parse(html);
        if (doc.select("input[name=username]").size() > 0) {
            // A login form appeared: authenticate inline.
            CSId = doc.select("input[name=CSId]").val();
            List<NameValuePair> nameValuePairs = new ArrayList<>(
                    2);
            nameValuePairs.add(new BasicNameValuePair("username", acc
                    .getName()));
            nameValuePairs.add(new BasicNameValuePair("password", acc
                    .getPassword()));
            nameValuePairs.add(new BasicNameValuePair("methodToCall",
                    "submit"));
            nameValuePairs.add(new BasicNameValuePair("CSId", CSId));
            nameValuePairs.add(new BasicNameValuePair("login_action",
                    "Login"));
            html = handleLoginMessage(httpPost(opac_url + "/login.do",
                    new UrlEncodedFormEntity(nameValuePairs), ENCODING));
            doc = Jsoup.parse(html);
            if (doc.getElementsByClass("error").size() == 0) {
                // Remember the session so later calls can skip the login.
                logged_in = System.currentTimeMillis();
                logged_in_as = acc;
            }
        }
        if (doc.select("input[name=expressorder]").size() > 0) {
            // Express-order checkbox present: submit it right away.
            // NOTE(review): "selection" is null on this path, so the
            // branch_inputfield pair is sent without a value — confirm this
            // is what the server expects.
            List<NameValuePair> nameValuePairs = new ArrayList<>(
                    2);
            nameValuePairs.add(new BasicNameValuePair(branch_inputfield,
                    selection));
            nameValuePairs.add(new BasicNameValuePair("methodToCall",
                    action));
            nameValuePairs.add(new BasicNameValuePair("CSId", CSId));
            nameValuePairs.add(new BasicNameValuePair("expressorder", " "));
            html = httpPost(opac_url + "/" + action + ".do",
                    new UrlEncodedFormEntity(nameValuePairs), ENCODING);
            doc = Jsoup.parse(html);
        }
        if (doc.select("input[name=" + branch_inputfield + "]").size() > 0) {
            // Pickup-branch choice required: collect the options and ask
            // the user via SELECTION_NEEDED.
            List<Map<String, String>> branches = new ArrayList<>();
            for (Element option : doc
                    .select("input[name=" + branch_inputfield + "]")
                    .first().parent().parent().parent().select("td")) {
                if (option.select("input").size() != 1) {
                    continue;
                }
                String value = option.text().trim();
                String key = option.select("input").val();
                Map<String, String> selopt = new HashMap<>();
                selopt.put("key", key);
                selopt.put("value", value);
                branches.add(selopt);
            }
            ReservationResult result = new ReservationResult(
                    MultiStepResult.Status.SELECTION_NEEDED);
            result.setActionIdentifier(ReservationResult.ACTION_BRANCH);
            result.setSelection(branches);
            return result;
        }
    } else if (useraction == ReservationResult.ACTION_BRANCH) {
        // Second step: the user picked a branch, submit it.
        List<NameValuePair> nameValuePairs = new ArrayList<>(2);
        nameValuePairs.add(new BasicNameValuePair(branch_inputfield,
                selection));
        nameValuePairs.add(new BasicNameValuePair("methodToCall", action));
        nameValuePairs.add(new BasicNameValuePair("CSId", CSId));
        String html = httpPost(opac_url + "/" + action + ".do",
                new UrlEncodedFormEntity(nameValuePairs), ENCODING);
        doc = Jsoup.parse(html);
    }
    if (doc == null) {
        return new ReservationResult(MultiStepResult.Status.ERROR);
    }
    if (doc.getElementsByClass("error").size() >= 1) {
        return new ReservationResult(MultiStepResult.Status.ERROR, doc
                .getElementsByClass("error").get(0).text());
    }
    if (doc.html().contains("jsp/error.jsp")) {
        return new ReservationResult(MultiStepResult.Status.ERROR, doc
                .getElementsByTag("h2").get(0).text());
    }
    if (doc.select("#CirculationForm p").size() > 0
            && doc.select("input[type=button]").size() >= 2) {
        // Confirmation page: split its "<label>: <value>" lines into detail
        // rows and ask the user to confirm.
        List<String[]> details = new ArrayList<>();
        for (String row : doc.select("#CirculationForm p").first().html()
                .split("<br>")) {
            Document frag = Jsoup.parseBodyFragment(row);
            if (frag.text().contains(":")) {
                String[] split = frag.text().split(":");
                if (split.length >= 2) {
                    details.add(new String[]{split[0].trim() + ":",
                            split[1].trim()});
                }
            } else {
                details.add(new String[]{"", frag.text().trim()});
            }
        }
        ReservationResult result = new ReservationResult(
                Status.CONFIRMATION_NEEDED);
        result.setDetails(details);
        return result;
    }
    if (doc.select("#CirculationForm .textrot").size() >= 1) {
        String errmsg = doc.select("#CirculationForm .textrot").get(0).text();
        if (errmsg
                .contains("Dieses oder andere Exemplare in anderer Zweigstelle ausleihbar")) {
            // This copy is not reservable, but another branch's copy is.
            // Pick the "best" alternative copy: fewest reservations, ties
            // broken by the earliest return date, and retry with it.
            Copy best = null;
            for (Copy copy : item.getCopies()) {
                if (copy.getResInfo() == null) {
                    continue;
                }
                if (best == null) {
                    best = copy;
                    continue;
                }
                try {
                    // NOTE(review): reservation counts are parsed with both
                    // Integer.parseInt and Long.parseLong — confirm this
                    // mix is intentional.
                    if (Integer.parseInt(copy.getReservations()) <
                            Long.parseLong(best.getReservations())) {
                        best = copy;
                    } else if (Integer.parseInt(copy
                            .getReservations()) == Long
                            .parseLong(best.getReservations())) {
                        if (copy.getReturnDate().isBefore(best.getReturnDate())) {
                            best = copy;
                        }
                    }
                } catch (NumberFormatException e) {
                }
            }
            if (best != null) {
                item.setReservation_info(best
                        .getResInfo());
                // Restart the flow with the alternative copy.
                return reservation(item, acc, 0, null);
            }
        }
        return new ReservationResult(MultiStepResult.Status.ERROR, errmsg);
    }
    if (doc.select("#CirculationForm td[colspan=2] strong").size() >= 1) {
        // Success message shown by the OPAC.
        return new ReservationResult(MultiStepResult.Status.OK, doc
                .select("#CirculationForm td[colspan=2] strong").get(0)
                .text());
    }
    return new ReservationResult(Status.OK);
}
/**
 * Renews a single lent item.
 *
 * @param a         the 1-based page offset the item was found on and the
 *                  renewal link's query string, separated by a "$". A
 *                  leading "§" marks a non-renewable item, followed by the
 *                  error text to display.
 * @param account   account to act on
 * @param useraction unused here (single-step operation)
 * @param Selection unused here (single-step operation)
 */
@Override
public ProlongResult prolong(String a, Account account, int useraction,
        String Selection) throws IOException {
    if (a.startsWith("§")) {
        return new ProlongResult(MultiStepResult.Status.ERROR,
                a.substring(1));
    }
    String[] parts = a.split("\\$");
    String offset = parts[0];
    String query = parts[1];
    if (!initialised) {
        start();
    }
    // Re-login when the session has expired, we were never logged in, or the
    // session belongs to a different account. (The two previously duplicated
    // branches performed the identical recovery and are merged here.)
    if (System.currentTimeMillis() - logged_in > SESSION_LIFETIME
            || logged_in_as == null
            || logged_in_as.getId() != account.getId()) {
        try {
            account(account);
        } catch (JSONException e) {
            e.printStackTrace();
            return new ProlongResult(MultiStepResult.Status.ERROR);
        } catch (OpacErrorException e) {
            return new ProlongResult(MultiStepResult.Status.ERROR,
                    e.getMessage());
        }
    }
    // We have to call the page we originally found the link on first...
    httpGet(opac_url + "/userAccount.do?methodToCall=showAccount&typ=1",
            ENCODING);
    if (!offset.equals("1")) {
        // Navigate to the correct result page of the lent list.
        httpGet(opac_url
                + "/userAccount.do?methodToCall=pos&accountTyp=AUSLEIHEN&anzPos="
                + offset, ENCODING);
    }
    String html = httpGet(opac_url + "/userAccount.do?" + query, ENCODING);
    Document doc = Jsoup.parse(html);
    if (doc.select("#middle .textrot").size() > 0) {
        // The OPAC shows renewal failures in red text.
        return new ProlongResult(MultiStepResult.Status.ERROR, doc
                .select("#middle .textrot").first().text());
    }
    return new ProlongResult(MultiStepResult.Status.OK);
}
/**
 * Cancels a reservation or order.
 *
 * @param media   account type, page offset and the cancel link's query
 *                string, separated by "$" (as produced by parse_reslist)
 * @param account account to act on
 * @throws OpacErrorException when re-login fails
 */
@Override
public CancelResult cancel(String media, Account account, int useraction,
        String selection) throws IOException, OpacErrorException {
    if (!initialised) {
        start();
    }
    String[] parts = media.split("\\$");
    String type = parts[0];
    String offset = parts[1];
    String query = parts[2];
    // Re-login when the session has expired, we were never logged in, or the
    // session belongs to a different account. (The two previously duplicated
    // branches performed the identical recovery and are merged here;
    // OpacErrorException from account() propagates to the caller.)
    if (System.currentTimeMillis() - logged_in > SESSION_LIFETIME
            || logged_in_as == null
            || logged_in_as.getId() != account.getId()) {
        try {
            account(account);
        } catch (JSONException e) {
            e.printStackTrace();
            throw new OpacErrorException(
                    stringProvider.getString(StringProvider.INTERNAL_ERROR));
        }
    }
    // We have to call the page we originally found the link on first...
    httpGet(opac_url + "/userAccount.do?methodToCall=showAccount&typ="
            + type, ENCODING);
    if (!offset.equals("1")) {
        // Navigate to the correct result page of the reservation list.
        httpGet(opac_url + "/userAccount.do?methodToCall=pos&anzPos="
                + offset, ENCODING);
    }
    // The cancel link itself carries all required parameters; the response
    // page is not inspected.
    httpGet(opac_url + "/userAccount.do?" + query, ENCODING);
    return new CancelResult(MultiStepResult.Status.OK);
}
/**
 * After login, the OPAC may show an intermediate message page that has to
 * be acknowledged before the session is usable; do so when detected.
 *
 * @param html response of the login POST
 * @return the page to continue with (either the acknowledged page or the
 *         original response)
 */
protected String handleLoginMessage(String html)
        throws IOException {
    if (!html.contains("methodToCall=done")) {
        return html;
    }
    return httpGet(opac_url + "/login.do?methodToCall=done", ENCODING);
}
/**
 * Logs the given account in and remembers the session.
 *
 * @param acc account to authenticate
 * @return true on success, false on communication errors
 * @throws OpacErrorException when the OPAC rejects the credentials (the
 *                            message is the page's error text)
 */
protected boolean login(Account acc) throws OpacErrorException {
    String html;
    List<NameValuePair> nameValuePairs = new ArrayList<>(2);
    try {
        // Fetch the account page first to obtain the session id (CSId) and,
        // where present, the hidden "as_fid" form token.
        String loginPage;
        loginPage = httpGet(opac_url
                + "/userAccount.do?methodToCall=show&type=1", ENCODING);
        Document loginPageDoc = Jsoup.parse(loginPage);
        if (loginPageDoc.select("input[name=as_fid]").size() > 0) {
            nameValuePairs.add(new BasicNameValuePair("as_fid",
                    loginPageDoc.select("input[name=as_fid]").first()
                            .attr("value")));
        }
        CSId = loginPageDoc.select("input[name=CSId]").val();
    } catch (IOException e1) {
        // Best effort: continue and attempt the login anyway.
        e1.printStackTrace();
    }
    nameValuePairs.add(new BasicNameValuePair("username", acc.getName()));
    nameValuePairs
            .add(new BasicNameValuePair("password", acc.getPassword()));
    nameValuePairs.add(new BasicNameValuePair("CSId", CSId));
    nameValuePairs.add(new BasicNameValuePair("methodToCall", "submit"));
    try {
        html = handleLoginMessage(httpPost(opac_url + "/login.do",
                new UrlEncodedFormEntity(nameValuePairs), ENCODING));
    } catch (IOException e) {
        // UnsupportedEncodingException and ClientProtocolException are both
        // IOException subtypes; the former three identical catch blocks are
        // collapsed into this one.
        e.printStackTrace();
        return false;
    }
    Document doc = Jsoup.parse(html);
    if (doc.getElementsByClass("error").size() > 0) {
        throw new OpacErrorException(doc.getElementsByClass("error").get(0)
                .text());
    }
    logged_in = System.currentTimeMillis();
    logged_in_as = acc;
    return true;
}
/**
 * Parses one page of the "lent items" account table into {@code media}.
 * Row 0 is the header; a row containing "keine Daten" means the list is
 * empty. Column 1 holds title/author, column 2 deadline/home branch.
 *
 * @param media  output list, appended to
 * @param doc    parsed account page
 * @param offset 1-based page offset, stored in each item's prolong data
 * @param data   per-library configuration (reads "baseurl")
 */
public static void parse_medialist(List<LentItem> media, Document doc, int offset,
        JSONObject data) {
    Elements copytrs = doc.select(".data tr");
    doc.setBaseUri(data.optString("baseurl"));
    DateTimeFormatter fmt = DateTimeFormat.forPattern("dd.MM.yyyy").withLocale(Locale.GERMAN);
    int trs = copytrs.size();
    if (trs == 1) {
        // Header row only: nothing lent.
        return;
    }
    assert (trs > 0);
    for (int i = 1; i < trs; i++) {
        Element tr = copytrs.get(i);
        LentItem item = new LentItem();
        if (tr.text().contains("keine Daten") || (trs == 2 && tr.children().size() == 1)) {
            // Placeholder row signalling an empty list.
            return;
        }
        item.setTitle(tr.child(1).select("strong").text().trim());
        try {
            item.setAuthor(tr.child(1).html().split("<br[ /]*>")[1].trim());
            String[] col2split = tr.child(2).html().split("<br[ /]*>");
            String deadline = col2split[0].trim();
            if (deadline.contains("-")) {
                // "dd.MM.yyyy - dd.MM.yyyy" ranges: keep the return date.
                deadline = deadline.split("-")[1].trim();
            }
            try {
                item.setDeadline(fmt.parseLocalDate(deadline).toString());
            } catch (IllegalArgumentException e1) {
                e1.printStackTrace();
            }
            if (col2split.length > 1) {
                item.setHomeBranch(col2split[1].trim());
            }
            if (tr.select("a").size() > 0) {
                for (Element link : tr.select("a")) {
                    String href = link.attr("abs:href");
                    Map<String, String> hrefq = getQueryParamsFirst(href);
                    // Null-safe comparison: links without a methodToCall
                    // parameter (or an unparsable href) previously caused an
                    // NPE that aborted parsing of this row; now they are
                    // simply skipped.
                    if (hrefq != null
                            && "renewalPossible".equals(hrefq.get("methodToCall"))) {
                        item.setProlongData(offset + "$" + href.split("\\?")[1]);
                        item.setRenewable(true);
                        break;
                    }
                }
            } else if (tr.select(".textrot, .textgruen, .textdunkelblau")
                    .size() > 0) {
                // Not renewable: the colored status text explains why. The
                // "§" prefix is recognized by prolong() as an error marker.
                item.setProlongData(
                        "§" + tr.select(".textrot, .textgruen, .textdunkelblau").text());
                item.setRenewable(false);
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        media.add(item);
    }
    assert (media.size() == trs - 1);
}
/**
 * Parses one page of a reservations/orders account table into
 * {@code reservations}. Row 0 is the header; a row containing
 * "keine Daten" (or a single-cell row) marks an empty list.
 *
 * @param type         account type identifier, stored in the cancel data
 * @param reservations output list, appended to
 * @param doc          parsed account page
 * @param offset       1-based page offset, stored in the cancel data
 */
protected void parse_reslist(String type,
        List<ReservedItem> reservations, Document doc, int offset) {
    Elements rows = doc.select(".data tr");
    doc.setBaseUri(opac_url);
    int rowCount = rows.size();
    if (rowCount == 1) {
        // Header row only: nothing reserved.
        return;
    }
    assert (rowCount > 0);
    for (int row = 1; row < rowCount; row++) {
        Element tr = rows.get(row);
        ReservedItem entry = new ReservedItem();
        boolean emptyMarker = tr.text().contains("keine Daten")
                || tr.children().size() == 1;
        if (emptyMarker) {
            return;
        }
        entry.setTitle(tr.child(1).select("strong").text().trim());
        try {
            String[] col1 = tr.child(1).html().split("<br[ /]*>");
            String[] col2 = tr.child(2).html().split("<br[ /]*>");
            if (col1.length > 1) {
                entry.setAuthor(col1[1].trim());
            }
            if (col2.length > 2) {
                entry.setBranch(col2[2].trim());
                entry.setStatus(col2[0].trim());
            }
            if (tr.select("a").size() == 1) {
                // Cancel link: keep only its query string, prefixed with
                // the account type and page offset for cancel().
                entry.setCancelData(type + "$" + offset + "$"
                        + tr.select("a").attr("abs:href").split("\\?")[1]);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
        reservations.add(entry);
    }
    assert (reservations.size() == rowCount - 1);
}
/**
 * Loads the complete account view: lent items (typ=1), orders (typ=6),
 * prebookings (typ=7), pending fees and the membership expiry date. Each
 * list may be paginated; additional pages are fetched via the ".box-right"
 * pagination links.
 *
 * @param acc account to load
 * @return the account data, or null when login fails
 */
@Override
public AccountData account(Account acc) throws IOException,
        JSONException,
        OpacErrorException {
    start(); // TODO: Is this necessary?
    int resultNum;
    if (!login(acc)) {
        return null;
    }
    // Geliehene Medien
    String html = httpGet(opac_url
            + "/userAccount.do?methodToCall=showAccount&typ=1", ENCODING);
    List<LentItem> medien = new ArrayList<>();
    Document doc = Jsoup.parse(html);
    doc.setBaseUri(opac_url);
    parse_medialist(medien, doc, 1, data);
    if (doc.select(".box-right").size() > 0) {
        // Follow pagination links (methodToCall=pos) for further pages;
        // anzPos=1 is the page we already parsed.
        for (Element link : doc.select(".box-right").first().select("a")) {
            String href = link.attr("abs:href");
            Map<String, String> hrefq = getQueryParamsFirst(href);
            if (hrefq == null || hrefq.get("methodToCall") == null) {
                continue;
            }
            if (hrefq.get("methodToCall").equals("pos")
                    && !"1".equals(hrefq.get("anzPos"))) {
                html = httpGet(href, ENCODING);
                parse_medialist(medien, Jsoup.parse(html),
                        Integer.parseInt(hrefq.get("anzPos")), data);
            }
        }
    }
    if (doc.select("#label1").size() > 0) {
        // Sanity check: the tab label carries the total count "(n)".
        resultNum = 0;
        String rNum = doc.select("#label1").first().text().trim()
                .replaceAll(".*\\(([0-9]*)\\).*", "$1");
        if (rNum.length() > 0) {
            resultNum = Integer.parseInt(rNum);
        }
        assert (resultNum == medien.size());
    }
    // Ordered media ("Bestellungen")
    html = httpGet(opac_url + "/userAccount.do?methodToCall=showAccount&typ=6", ENCODING);
    List<ReservedItem> reserved = new ArrayList<>();
    doc = Jsoup.parse(html);
    doc.setBaseUri(opac_url);
    parse_reslist("6", reserved, doc, 1);
    // Capture the orders-tab label NOW: "doc" is reassigned to the typ=7
    // page below, and the count check at the end needs this element.
    Elements label6 = doc.select("#label6");
    if (doc.select(".box-right").size() > 0) {
        for (Element link : doc.select(".box-right").first().select("a")) {
            String href = link.attr("abs:href");
            Map<String, String> hrefq = getQueryParamsFirst(href);
            if (hrefq == null || hrefq.get("methodToCall") == null) {
                break;
            }
            if (hrefq.get("methodToCall").equals("pos")
                    && !"1".equals(hrefq.get("anzPos"))) {
                html = httpGet(href, ENCODING);
                parse_reslist("6", reserved, Jsoup.parse(html),
                        Integer.parseInt(hrefq.get("anzPos")));
            }
        }
    }
    // Prebooked media ("Vormerkungen")
    html = httpGet(opac_url
            + "/userAccount.do?methodToCall=showAccount&typ=7", ENCODING);
    doc = Jsoup.parse(html);
    doc.setBaseUri(opac_url);
    parse_reslist("7", reserved, doc, 1);
    if (doc.select(".box-right").size() > 0) {
        for (Element link : doc.select(".box-right").first().select("a")) {
            String href = link.attr("abs:href");
            Map<String, String> hrefq = getQueryParamsFirst(href);
            if (hrefq == null || hrefq.get("methodToCall") == null) {
                break;
            }
            if (hrefq.get("methodToCall").equals("pos")
                    && !"1".equals(hrefq.get("anzPos"))) {
                html = httpGet(href, ENCODING);
                parse_reslist("7", reserved, Jsoup.parse(html),
                        Integer.parseInt(hrefq.get("anzPos")));
            }
        }
    }
    if (label6.size() > 0 && doc.select("#label7").size() > 0) {
        // Sanity check: orders + prebookings should match the combined
        // "reserved" list size.
        resultNum = 0;
        String rNum = label6.text().trim()
                .replaceAll(".*\\(([0-9]*)\\).*", "$1");
        if (rNum.length() > 0) {
            resultNum = Integer.parseInt(rNum);
        }
        rNum = doc.select("#label7").text().trim()
                .replaceAll(".*\\(([0-9]*)\\).*", "$1");
        if (rNum.length() > 0) {
            resultNum += Integer.parseInt(rNum);
        }
        assert (resultNum == reserved.size());
    }
    AccountData res = new AccountData(acc.getId());
    if (doc.select("#label8").size() > 0) {
        // Pending fees: "Gebühren ... (<amount> <currency>)".
        String text = doc.select("#label8").first().text().trim();
        if (text.matches("Geb.+hren[^\\(]+\\(([0-9.,]+)[^0-9€A-Z]*(€|EUR|CHF|Fr)\\)")) {
            text = text
                    .replaceAll(
                            "Geb.+hren[^\\(]+\\(([0-9.,]+)[^0-9€A-Z]*(€|EUR|CHF|Fr)\\)",
                            "$1 $2");
            res.setPendingFees(text);
        }
    }
    // Strip everything except digits and dots to isolate the expiry date.
    Pattern p = Pattern.compile("[^0-9.]*", Pattern.MULTILINE);
    if (doc.select(".box3").size() > 0) {
        for (Element box : doc.select(".box3")) {
            if (box.select("strong").size() == 1) {
                String text = box.select("strong").text();
                if (text.equals("Jahresgebühren")) {
                    text = box.text();
                    text = p.matcher(text).replaceAll("");
                    res.setValidUntil(text);
                }
            }
        }
    }
    res.setLent(medien);
    res.setReservations(reserved);
    return res;
}
/**
 * Builds a URL suitable for sharing this item: a search by catalogue key
 * when an id is known, otherwise a free-text search by title.
 *
 * @param id    catalogue key, may be null/empty
 * @param title fallback title used when no id is available
 * @return an absolute start.do search URL
 */
@Override
public String getShareUrl(String id, String title) {
    // Some libraries require extra start parameters, e.g. Login=foo.
    String params = "";
    if (data.has("startparams")) {
        try {
            params = data.getString("startparams") + "&";
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
    boolean haveId = id != null && !id.equals("");
    if (haveId) {
        // Field 0 = catalogue key, exact match.
        return opac_url + "/start.do?" + params
                + "searchType=1&Query=0%3D%22" + id + "%22";
    }
    String encoded;
    try {
        encoded = URLEncoder.encode(title, getDefaultEncoding());
    } catch (UnsupportedEncodingException e) {
        //noinspection deprecation
        encoded = URLEncoder.encode(title);
    }
    // Field -1 = free search over all fields.
    return opac_url + "/start.do?" + params
            + "searchType=1&Query=-1%3D%22" + encoded + "%22";
}
/**
 * @return the feature flags this API implementation supports: renewing all
 *         items at once, switching accounts, and endless scrolling.
 */
@Override
public int getSupportFlags() {
    return SUPPORT_FLAG_ACCOUNT_PROLONG_ALL
            | SUPPORT_FLAG_CHANGE_ACCOUNT
            | SUPPORT_FLAG_ENDLESS_SCROLLING;
}
/**
 * Renews all lent items at once (methodToCall=renewalPossible) and parses
 * the resulting per-item status table.
 *
 * @param account    account to act on
 * @param useraction unused here (single-step operation)
 * @param selection  unused here (single-step operation)
 * @return OK with one map (title/author/new date/message) per item, or an
 *         ERROR result
 */
@Override
public ProlongAllResult prolongAll(Account account, int useraction,
        String selection) throws IOException {
    if (!initialised) {
        start();
    }
    // Re-login when the session has expired, we were never logged in, or the
    // session belongs to a different account. (The two previously duplicated
    // branches performed the identical recovery and are merged here.)
    if (System.currentTimeMillis() - logged_in > SESSION_LIFETIME
            || logged_in_as == null
            || logged_in_as.getId() != account.getId()) {
        try {
            account(account);
        } catch (JSONException e) {
            e.printStackTrace();
            return new ProlongAllResult(MultiStepResult.Status.ERROR);
        } catch (OpacErrorException e) {
            return new ProlongAllResult(MultiStepResult.Status.ERROR,
                    e.getMessage());
        }
    }
    // We have to call the page we originally found the link on first...
    String html = httpGet(
            opac_url
                    + "/userAccount.do?methodToCall=renewalPossible&renewal=account",
            ENCODING);
    Document doc = Jsoup.parse(html);
    if (doc.select("table.data").size() > 0) {
        List<Map<String, String>> result = new ArrayList<>();
        for (Element td : doc.select("table.data tr td")) {
            Map<String, String> line = new HashMap<>();
            // Only cells that contain both a title and a status belong to
            // the per-item result blocks.
            if (!td.text().contains("Titel")
                    || !td.text().contains("Status")) {
                continue;
            }
            // The cell alternates label nodes ("Titel:", "Status:", ...)
            // and value nodes; remember which key the next value belongs to.
            String nextNodeIs = "";
            for (Node n : td.childNodes()) {
                String text;
                if (n instanceof Element) {
                    text = ((Element) n).text();
                } else if (n instanceof TextNode) {
                    text = ((TextNode) n).text();
                } else {
                    continue;
                }
                if (text.trim().length() == 0) {
                    continue;
                }
                if (text.contains("Titel:")) {
                    nextNodeIs = ProlongAllResult.KEY_LINE_TITLE;
                } else if (text.contains("Verfasser:")) {
                    nextNodeIs = ProlongAllResult.KEY_LINE_AUTHOR;
                } else if (text.contains("Leihfristende:")) {
                    nextNodeIs = ProlongAllResult.KEY_LINE_NEW_RETURNDATE;
                } else if (text.contains("Status:")) {
                    nextNodeIs = ProlongAllResult.KEY_LINE_MESSAGE;
                } else if (text.contains("Mediennummer:")
                        || text.contains("Signatur:")) {
                    // Labels we deliberately ignore.
                    nextNodeIs = "";
                } else if (nextNodeIs.length() > 0) {
                    line.put(nextNodeIs, text.trim());
                    nextNodeIs = "";
                }
            }
            result.add(line);
        }
        return new ProlongAllResult(MultiStepResult.Status.OK, result);
    }
    return new ProlongAllResult(MultiStepResult.Status.ERROR,
            stringProvider.getString(StringProvider.COULD_NOT_LOAD_ACCOUNT));
}
/**
 * Filtering of existing search results is not implemented for this
 * backend; callers receive null.
 */
@Override
public SearchRequestResult filterResults(Filter filter, Option option)
        throws IOException, OpacErrorException {
    // TODO Auto-generated method stub
    return null;
}
/**
 * Verifies the given credentials by performing a real login.
 *
 * @throws NotReachableException when the login did not succeed
 * @throws OpacErrorException    when the OPAC reports an error message
 */
@Override
public void checkAccountData(Account account) throws IOException,
        JSONException, OpacErrorException {
    start(); // TODO: Is this necessary?
    if (!login(account)) {
        throw new NotReachableException("Login unsuccessful");
    }
}
/**
 * Language switching is not implemented for this backend; the call is a
 * no-op.
 */
@Override
public void setLanguage(String language) {
    // TODO Auto-generated method stub
}
/**
 * Language enumeration is not implemented for this backend; callers
 * receive null.
 */
@Override
public Set<String> getSupportedLanguages() throws IOException {
    // TODO Auto-generated method stub
    return null;
}
}
|
package org.mwc.debrief.core.ui.views;
import java.awt.Color;
import java.awt.Point;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.Enumeration;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.jface.action.IToolBarManager;
import org.eclipse.jface.action.MenuManager;
import org.eclipse.jface.action.Separator;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.ui.IActionBars;
import org.eclipse.ui.IWorkbenchPart;
import org.eclipse.ui.part.ViewPart;
import org.mwc.cmap.core.CorePlugin;
import org.mwc.cmap.core.DataTypes.Temporal.TimeProvider;
import org.mwc.cmap.core.ui_support.PartMonitor;
import org.mwc.cmap.plotViewer.editors.chart.SWTChart;
import Debrief.Wrappers.FixWrapper;
import Debrief.Wrappers.TrackWrapper;
import Debrief.Wrappers.Track.LightweightTrackWrapper;
import MWC.Algorithms.Projections.FlatProjection;
import MWC.GUI.CanvasType;
import MWC.GUI.Editable;
import MWC.GUI.Layers;
import MWC.GUI.Layers.OperateFunction;
import MWC.GUI.Chart.Painters.LocalGridPainter;
import MWC.GUI.Shapes.RangeRingShape;
import MWC.GenericData.HiResDate;
import MWC.GenericData.Watchable;
import MWC.GenericData.WatchableList;
import MWC.GenericData.WorldArea;
import MWC.GenericData.WorldDistance;
import MWC.GenericData.WorldLocation;
import MWC.GenericData.WorldVector;
import MWC.TacticalData.TrackDataProvider;
public class UnitCentricView extends ViewPart
{
/** menu action that applies a preset time period when selected */
private class PeriodAction extends Action
{
  /** period (in millis) this menu entry represents */
  private final long _period;
  /** receiver that the period is pushed to */
  private final PeriodOperation _operation;

  /**
   * @param title menu label
   * @param period period in milliseconds to apply when selected
   * @param operation callback receiving the chosen period
   */
  public PeriodAction(final String title, final long period,
      final PeriodOperation operation)
  {
    super(title);
    _period = period;
    _operation = operation;
    // FIX: removed stray _myOverviewChart.repaint() - constructing a menu
    // entry must not trigger a chart repaint
  }

  @Override
  public void run()
  {
    // push the configured period to the receiver, then refresh the chart
    _operation.selected(_period);
    _myOverviewChart.update();
  }
}
/** menu action that applies a preset distance when selected */
private class DistanceAction extends Action
{
  /** distance this menu entry represents */
  private final WorldDistance _distance;
  /** receiver that the distance is pushed to */
  private final DistanceOperation _operation;

  /**
   * @param title menu label
   * @param distance distance to apply when selected
   * @param operation callback receiving the chosen distance
   */
  public DistanceAction(final String title, final WorldDistance distance,
      final DistanceOperation operation)
  {
    super(title);
    _distance = distance;
    _operation = operation;
    // FIX: removed stray _myOverviewChart.repaint() - constructing a menu
    // entry must not trigger a chart repaint
  }

  @Override
  public void run()
  {
    // push the configured distance to the receiver, then refresh the chart
    _operation.selected(_distance);
    _myOverviewChart.update();
  }
}
/** callback used by PeriodAction entries to apply a chosen period (millis) */
private static interface PeriodOperation
{
  public void selected(long period);
}

/** callback used by DistanceAction entries to apply a chosen distance */
private static interface DistanceOperation
{
  public void selected(WorldDistance distance);
}
/** visitor handed to walkTree(): receives each qualifying secondary fix */
public static interface IOperateOnMatch
{
  /**
   * process this single data object
   *
   * @param rawSec
   *          the fix we're looking at
   * @param offsetLocation
   *          unit-centric version of the location
   * @param proportion
   *          how far back through the time period we are
   */
  void doItTo(final FixWrapper rawSec, final WorldLocation offsetLocation,
      final double proportion);

  /**
   * process the secondary track position that's nearest to the required time
   *
   * @param nearestInTime the fix closest to the subject time
   * @param nearestOffset its unit-centric location
   */
  void processNearest(final FixWrapper nearestInTime,
      final WorldLocation nearestOffset);
}
private class UnitCentricChart extends SWTChart
{
private static final long serialVersionUID = 1L;

// screen position of the previously-painted fix, used to join successive
// points of the same pass with a line (reset between tracks)
private Point oldEnd;

public UnitCentricChart(final Composite parent)
{
  // no layers yet - they are supplied later via setLayers()
  super(null, parent, _myProjection);
}

@Override
public void chartFireSelectionChanged(final ISelection sel)
{
  // just ignore it
}
/**
 * If the projection has no data area yet, walk the visible tracks to
 * compute one (relative to the primary track) and store it.
 *
 * @param theLayers the layers to walk for track data
 */
private void checkDataCoverage(final Layers theLayers)
{
  // check if we have null data area
  // FIX: also require a time provider - walkTree below calls
  // _timeProvider.getTime(), which would NPE without one
  if ((_myOverviewChart.getCanvas().getProjection().getDataArea() == null)
      && (_trackDataProvider != null) && (_timeProvider != null))
  {
    final WatchableList primary = _trackDataProvider.getPrimaryTrack();
    // FIX: dropped tautological "instanceof WatchableList" - primary is
    // already declared as WatchableList, so only the null check matters
    if (primary != null)
    {
      final WorldLocation origin = new WorldLocation(0d, 0d, 0d);
      final WorldArea area = new WorldArea(origin, origin);
      // visitor that grows the area to cover every offset location
      final IOperateOnMatch getBounds = new IOperateOnMatch()
      {
        @Override
        public void doItTo(final FixWrapper rawSec,
            final WorldLocation offsetLocation, final double proportion)
        {
          area.extend(offsetLocation);
        }

        @Override
        public void processNearest(final FixWrapper nearestInTime,
            final WorldLocation nearestOffset)
        {
          // ok, ignore
        }
      };
      walkTree(theLayers, primary, _timeProvider.getTime(), getBounds,
          getSnailLength());
      // ok, store the data area
      _myOverviewChart.getCanvas().getProjection().setDataArea(area);
    }
  }
}
/**
 * Blend a foreground colour towards the background colour.
 *
 * @param color the foreground colour
 * @param proportion 0 = pure foreground, 1 = pure background
 * @param backgroundColor the colour to fade towards
 * @return the per-channel linear interpolation of the two colours
 */
protected Color colorFor(final Color color, final float proportion,
    final Color backgroundColor)
{
  // linear interpolation per channel: fg + (bg - fg) * proportion
  final int fgRed = color.getRed();
  final int fgGreen = color.getGreen();
  final int fgBlue = color.getBlue();
  final float mixedRed = fgRed + (backgroundColor.getRed() - fgRed)
      * proportion;
  final float mixedGreen = fgGreen + (backgroundColor.getGreen() - fgGreen)
      * proportion;
  final float mixedBlue = fgBlue + (backgroundColor.getBlue() - fgBlue)
      * proportion;
  return new Color((int) mixedRed, (int) mixedGreen, (int) mixedBlue);
}
/**
 * Paint the unit-centric picture: decorations (grid/rings), the secondary
 * tracks offset relative to the primary, then the ownship marker on top.
 *
 * @param dest the canvas to paint into
 */
@Override
public void paintMe(final CanvasType dest)
{
  if (_theLayers == null)
  {
    CorePlugin.logError(IStatus.WARNING,
        "Unit centric view is missing layers", null);
    return;
  }
  if (_trackDataProvider == null)
  {
    CorePlugin.logError(IStatus.WARNING,
        "Unit centric view is missing track data provider", null);
    // FIX: previously fell through and dereferenced the null provider below
    return;
  }
  // ok, check we have primary track
  if (_trackDataProvider.getPrimaryTrack() == null)
  {
    CorePlugin.logError(IStatus.WARNING,
        "Unit centric view is missing primary track", null);
    CorePlugin.showMessage("Unit Centric View",
        "Please assign a primary track");
    // FIX: nothing can be plotted without a primary track
    return;
  }
  if (_timeProvider == null)
  {
    CorePlugin.logError(IStatus.WARNING,
        "Unit centric view is missing time provider", null);
    // FIX: getTime() is called below, so we can't continue without one
    return;
  }
  checkDataCoverage(_theLayers);
  final WatchableList primary = _trackDataProvider.getPrimaryTrack();
  // is it a track?
  final TrackWrapper priTrack = primary instanceof TrackWrapper
      ? (TrackWrapper) primary : null;
  // remember if we've overridden the interpolation
  final boolean oldInterp;
  if (priTrack != null)
  {
    oldInterp = priTrack.getInterpolatePoints();
    priTrack.setInterpolatePoints(true);
  }
  else
  {
    oldInterp = false;
  }
  // reset the last point we were looking at
  oldEnd = null;
  // do we draw local grid
  dest.setLineWidth(0f);
  if (_showGrid.isChecked())
  {
    _localGrid.paint(dest);
  }
  if (_showRings.isChecked())
  {
    _rangeRings.paint(dest);
  }
  // get the time
  final boolean isSnail = _snailPaint.isChecked();
  final HiResDate subjectTime = _timeProvider.getTime();
  final IOperateOnMatch paintIt;
  if (isSnail)
  {
    // snail mode: fade each fix towards the background with age
    paintIt = new IOperateOnMatch()
    {
      @Override
      public void doItTo(final FixWrapper rawSec,
          final WorldLocation offsetLocation, final double proportion)
      {
        dest.setLineWidth(3f);
        // sort out the color
        final Color newCol = colorFor(rawSec.getColor(), (float) proportion,
            _myOverviewChart.getCanvas().getBackgroundColor());
        dest.setColor(newCol);
        rawSec.paintMe(dest, offsetLocation, rawSec.getColor());
        // and the line
        final Point newEnd = dest.toScreen(offsetLocation);
        if (oldEnd != null)
        {
          dest.drawLine(oldEnd.x, oldEnd.y, newEnd.x, newEnd.y);
        }
        oldEnd = new Point(newEnd);
      }

      @Override
      public void processNearest(final FixWrapper nearestInTime,
          final WorldLocation nearestOffset)
      {
        // reset the last object pointer
        oldEnd = null;
      }
    };
  }
  else
  {
    // normal mode: constant colours, highlight the nearest-in-time fix
    paintIt = new IOperateOnMatch()
    {
      @Override
      public void doItTo(final FixWrapper rawSec,
          final WorldLocation offsetLocation, final double proportion)
      {
        dest.setLineWidth(2f);
        dest.setColor(rawSec.getColor());
        rawSec.paintMe(dest, offsetLocation, rawSec.getColor());
        // and the line
        final Point newEnd = dest.toScreen(offsetLocation);
        if (oldEnd != null)
        {
          dest.drawLine(oldEnd.x, oldEnd.y, newEnd.x, newEnd.y);
        }
        oldEnd = new Point(newEnd);
      }

      @Override
      public void processNearest(final FixWrapper nearestInTime,
          final WorldLocation nearestOffset)
      {
        // highlight the nearest fix with a square marker
        dest.setLineWidth(3);
        dest.setColor(Color.DARK_GRAY);
        final Point pt = dest.toScreen(nearestOffset);
        dest.drawRect(pt.x - 3, pt.y - 3, 7, 7);
        // reset the last object pointer
        oldEnd = null;
      }
    };
  }
  walkTree(_theLayers, primary, subjectTime, paintIt, getSnailLength());
  // draw in the ownship marker last, so it's on top
  dest.setLineWidth(2f);
  final Point pt = _myOverviewChart.getCanvas().getProjection().toScreen(
      new WorldLocation(0d, 0d, 0d));
  dest.setColor(primary.getColor());
  dest.drawOval(pt.x - 4, pt.y - 4, 8, 8);
  dest.drawLine(pt.x, pt.y - 12, pt.x, pt.y + 5);
  if (priTrack != null)
  {
    // restore interpolation on the primary track
    priTrack.setInterpolatePoints(oldInterp);
  }
}
}
/**
* convert an absolute location into a location relative to a primary track
*
* @param primary
* @param other
* @param origin
* @return
*/
/**
 * convert an absolute location into a location relative to a primary track
 *
 * @param primary the primary-track fix supplying position and course
 * @param other the absolute location to convert
 * @param origin the unit-centric origin the rotated offset is applied to
 * @return the equivalent unit-centric location
 */
private static WorldLocation processOffset(final FixWrapper primary,
    final WorldLocation other, final WorldLocation origin)
{
  // vector from the primary position out to the other location
  final WorldVector absoluteOffset = other.subtract(primary.getLocation());
  // rotate the vector so it is relative to ownship's heading
  final double relativeBearing = absoluteOffset.getBearing()
      - primary.getCourse();
  // same range, rotated bearing, no vertical component
  return origin.add(new WorldVector(relativeBearing,
      absoluteOffset.getRange(), 0d));
}
/**
 * walk all visible lightweight tracks, converting each qualifying fix to a
 * unit-centric offset (relative to the primary) and handing it to the
 * supplied visitor.
 *
 * @param theLayers the layers to walk
 * @param primary the primary (reference) track
 * @param subjectTime the current time (see review note below re null)
 * @param doIt visitor that receives each offset fix
 * @param snailLength history window in millis (Long.MAX_VALUE = all data)
 */
private static void walkTree(final Layers theLayers,
    final WatchableList primary, final HiResDate subjectTime,
    final IOperateOnMatch doIt, final long snailLength)
{
  final WorldLocation origin = new WorldLocation(0d, 0d, 0d);
  OperateFunction checkIt = new OperateFunction()
  {
    @Override
    public void operateOn(Editable item)
    {
      final LightweightTrackWrapper other = (LightweightTrackWrapper) item;
      if (!other.getVisible())
        return;
      // is it the primary?
      if (other != primary)
      {
        // keep track of the fix nearest to the required DTG
        FixWrapper nearestInTime = null;
        WorldLocation nearestOffset = null;
        long nearestDelta = Long.MAX_VALUE;
        // ok, run back through the data
        final Enumeration<Editable> pts = other.getPositionIterator();
        while (pts.hasMoreElements())
        {
          final FixWrapper thisF = (FixWrapper) pts.nextElement();
          final HiResDate hisD = thisF.getDTG();
          final boolean useIt;
          if (subjectTime == null)
          {
            useIt = true;
          }
          else
          {
            if (snailLength == Long.MAX_VALUE)
            {
              // no snail window - use every fix
              useIt = true;
            }
            else
            {
              // only keep fixes inside the trailing snail window
              final long offset = subjectTime.getDate().getTime() - hisD
                  .getDate().getTime();
              useIt = offset > 0 && offset < snailLength;
            }
          }
          if (useIt)
          {
            // find the primary position at this fix's time
            final Watchable[] nearest = primary.getNearestTo(hisD);
            if (nearest != null && nearest.length > 0)
            {
              final Watchable nItem = nearest[0];
              if (nItem instanceof FixWrapper)
              {
                final FixWrapper priFix = (FixWrapper) nItem;
                // NOTE(review): if subjectTime is null we still reach this
                // dereference, which would NPE - confirm callers never pass
                // a null time while track data is present
                final long diff = Math.abs(hisD.getDate().getTime()
                    - subjectTime.getDate().getTime());
                if (nearestInTime == null || diff < nearestDelta)
                {
                  nearestInTime = thisF;
                  nearestDelta = diff;
                  nearestOffset = processOffset(priFix, thisF.getLocation(),
                      origin);
                }
                final WorldLocation pos = processOffset(priFix, thisF
                    .getLocation(), origin);
                // work out how far back down the leg we are
                final long age = subjectTime.getDate().getTime() - thisF
                    .getDTG().getDate().getTime();
                final double proportion = age / (double) snailLength;
                doIt.doItTo(thisF, pos, proportion);
              }
            }
          }
        }
        if (nearestInTime != null)
        {
          doIt.processNearest(nearestInTime, nearestOffset);
        }
      }
    }
  };
  theLayers.walkVisibleItems(LightweightTrackWrapper.class, checkIt);
}
// the chart we paint the unit-centric picture into
private UnitCentricChart _myOverviewChart;

// flat projection shared with the chart
private final FlatProjection _myProjection;

// snail trail length, in millis (default 30 minutes)
private long _snailLength = 1000 * 60 * 30;

/**
 * helper application to help track creation/activation of new plots
 */
private PartMonitor _myPartMonitor;

// layers of the currently-active editor (null when none)
protected Layers _targetLayers;

private Action _fitToWindow;

// primary/secondary track assignments from the active editor
protected TrackDataProvider _trackDataProvider;

// time source from the active editor
protected TimeProvider _timeProvider;

// listener that refreshes the chart when the time changes
protected PropertyChangeListener _timeChangeListener;

// toolbar actions: paint mode radio pair plus decoration toggles
private Action _normalPaint;
private Action _snailPaint;
private Action _showRings;
private Action _showGrid;

// decorations drawn around the unit-centric origin
private final LocalGridPainter _localGrid;
private final RangeRingShape _rangeRings;
/**
 * create the view: set up the projection, the time-change listener, and the
 * default grid/ring decorations.
 */
public UnitCentricView()
{
  _myProjection = new FlatProjection();
  _timeChangeListener = new PropertyChangeListener()
  {
    @Override
    public void propertyChange(final PropertyChangeEvent evt)
    {
      // ok, trigger repaint
      _myOverviewChart.update();
    }
  };
  // 30 km local grid centred on the unit-centric origin
  _localGrid = new LocalGridPainter();
  _localGrid.setDelta(new WorldDistance(30, WorldDistance.KM));
  _localGrid.setOrigin(new WorldLocation(0d, 0d, 0d));
  // five range rings, 5 km apart, centred on the origin
  _rangeRings = new RangeRingShape(new WorldLocation(0d, 0d, 0d), 5,
      new WorldDistance(5, WorldDistance.KM));
}
/** wire our menu and toolbar contributions into the view's action bars */
private void contributeToActionBars()
{
  final IActionBars actionBars = getViewSite().getActionBars();
  fillLocalPullDown(actionBars.getMenuManager());
  fillLocalToolBar(actionBars.getToolBarManager());
}
/**
 * build the UI: create the chart, the actions and toolbar entries, then
 * start listening for the workbench parts we depend on.
 */
@Override
public void createPartControl(final Composite parent)
{
  // declare our context sensitive help
  CorePlugin.declareContextHelp(parent, "org.mwc.debrief.help.OverviewChart");
  // hey, first create the chart
  _myOverviewChart = new UnitCentricChart(parent)
  {
    private static final long serialVersionUID = 1L;

    @Override
    public void canvasResized()
    {
      // just check we have a plot
      if (_targetLayers != null)
      {
        super.canvasResized();
      }
    }
  };
  makeActions();
  contributeToActionBars();
  watchMyParts();
}
/** tidy up: detach our part-monitor listeners before the view goes away */
@Override
public void dispose()
{
  super.dispose();
  // cancel any listeners
  final PartMonitor monitor = _myPartMonitor;
  if (monitor != null)
  {
    monitor.ditch();
  }
}
/** build the view menu: ring-radii, grid-size and snail-length sub-menus */
private void fillLocalPullDown(final IMenuManager manager)
{
  // choices for the range-ring spacing
  final DistanceOperation setRings = new DistanceOperation()
  {
    @Override
    public void selected(final WorldDistance distance)
    {
      _rangeRings.setRingWidth(distance);
    }
  };
  final MenuManager ringRadii = new MenuManager("Ring radii");
  // ringRadii.setImageDescriptor(CorePlugin.getImageDescriptor(
  // "icons/16/range_rings.png"));
  ringRadii.add(new DistanceAction("100m", new WorldDistance(100,
      WorldDistance.METRES), setRings));
  ringRadii.add(new DistanceAction("500m", new WorldDistance(500,
      WorldDistance.METRES), setRings));
  ringRadii.add(new DistanceAction("1 km", new WorldDistance(1,
      WorldDistance.KM), setRings));
  ringRadii.add(new DistanceAction("1 nm", new WorldDistance(1,
      WorldDistance.NM), setRings));
  ringRadii.add(new DistanceAction("5 nm", new WorldDistance(5,
      WorldDistance.NM), setRings));
  ringRadii.add(new DistanceAction("10 nm", new WorldDistance(10,
      WorldDistance.NM), setRings));
  manager.add(ringRadii);
  // choices for the local-grid spacing
  final DistanceOperation setGrid = new DistanceOperation()
  {
    @Override
    public void selected(final WorldDistance distance)
    {
      _localGrid.setDelta(distance);
    }
  };
  final MenuManager gridSize = new MenuManager("Grid size");
  gridSize.add(new DistanceAction("100m", new WorldDistance(100,
      WorldDistance.METRES), setGrid));
  gridSize.add(new DistanceAction("500m", new WorldDistance(500,
      WorldDistance.METRES), setGrid));
  gridSize.add(new DistanceAction("1 km", new WorldDistance(1,
      WorldDistance.KM), setGrid));
  gridSize.add(new DistanceAction("1 nm", new WorldDistance(1,
      WorldDistance.NM), setGrid));
  gridSize.add(new DistanceAction("5 nm", new WorldDistance(5,
      WorldDistance.NM), setGrid));
  gridSize.add(new DistanceAction("10 nm", new WorldDistance(10,
      WorldDistance.NM), setGrid));
  manager.add(gridSize);
  // choices for the snail-trail history length
  final PeriodOperation setSnail = new PeriodOperation()
  {
    @Override
    public void selected(final long period)
    {
      _snailLength = period;
    }
  };
  final MenuManager periodSize = new MenuManager("Snail length");
  periodSize.add(new PeriodAction("5 Mins", 1000 * 60 * 5, setSnail));
  periodSize.add(new PeriodAction("15 Mins", 1000 * 60 * 15, setSnail));
  periodSize.add(new PeriodAction("30 Mins", 1000 * 60 * 30, setSnail));
  periodSize.add(new PeriodAction("1 Hour", 1000 * 60 * 60 * 1, setSnail));
  periodSize.add(new PeriodAction("2 Hours", 1000 * 60 * 60 * 2, setSnail));
  manager.add(periodSize);
}
/** populate the toolbar: paint modes, decoration toggles, fit, help */
private void fillLocalToolBar(final IToolBarManager manager)
{
  manager.add(_normalPaint);
  manager.add(_snailPaint);
  manager.add(new Separator());
  manager.add(_showRings);
  manager.add(_showGrid);
  manager.add(new Separator());
  manager.add(_fitToWindow);
  // and the help link
  manager.add(new Separator());
  manager.add(CorePlugin.createOpenHelpAction(
      "org.mwc.debrief.help.OverviewChart", null, this));
}
/**
 * do a fit-to-window of the target viewport
 */
protected void fitTargetToWindow()
{
  // TODO: resize to show all data
  // clearing the data area makes checkDataCoverage() recalculate it on the
  // next paint
  _myOverviewChart.getCanvas().getProjection().setDataArea(null);
  // now, redraw our rectangle
  _myOverviewChart.repaint();
}
/**
 * Length of history to plot: the configured snail length when in snail
 * mode, effectively unlimited (Long.MAX_VALUE) otherwise.
 *
 * @return the history window, in milliseconds
 */
private long getSnailLength()
{
  return _snailPaint.isChecked() ? _snailLength : Long.MAX_VALUE;
}
/** create the toolbar actions: fit, paint modes, ring and grid toggles */
private void makeActions()
{
  _fitToWindow = new Action()
  {
    @Override
    public void run()
    {
      // ok, fit the plot to the window...
      fitTargetToWindow();
    }
  };
  _fitToWindow.setText("Fit to window");
  _fitToWindow.setToolTipText(
      "Zoom the selected plot out to show the full data");
  _fitToWindow.setImageDescriptor(CorePlugin.getImageDescriptor(
      "icons/16/fit_to_win.png"));
  // the two paint modes act as a radio pair, each un-checking the other
  _normalPaint = new Action("Normal Painter", SWT.RADIO)
  {
    @Override
    public void run()
    {
      _snailPaint.setChecked(false);
      // and repaint
      _myOverviewChart.update();
    }
  };
  _normalPaint.setImageDescriptor(CorePlugin.getImageDescriptor(
      "icons/16/normal.png"));
  _normalPaint.setChecked(true);
  _snailPaint = new Action("Snail Painter", SWT.RADIO)
  {
    @Override
    public void run()
    {
      _normalPaint.setChecked(false);
      // and repaint
      _myOverviewChart.update();
    }
  };
  _snailPaint.setChecked(false);
  _snailPaint.setImageDescriptor(CorePlugin.getImageDescriptor(
      "icons/16/snail.png"));
  // decoration toggles just trigger a repaint; paintMe() reads their state
  _showRings = new Action("Show range rings", SWT.CHECK)
  {
    @Override
    public void run()
    {
      _myOverviewChart.update();
    }
  };
  _showRings.setChecked(false);
  _showRings.setImageDescriptor(CorePlugin.getImageDescriptor(
      "icons/16/range_rings.png"));
  _showGrid = new Action("Show local grid", SWT.CHECK)
  {
    @Override
    public void run()
    {
      _myOverviewChart.update();
    }
  };
  _showGrid.setChecked(false);
  _showGrid.setImageDescriptor(CorePlugin.getImageDescriptor(
      "icons/16/local_grid.png"));
}
/**
 * ok, a new plot is selected - better show it then
 *
 * @param provider
 *          the new plot (null when the current plot has closed)
 * @param parentPart
 *          the part containing the plot
 */
protected void plotSelected(final Layers provider,
    final IWorkbenchPart parentPart)
{
  // ok, clear the map area
  _myOverviewChart.getCanvas().getProjection().setDataArea(null);
  // ok - update our chart to show the indicated plot.
  _myOverviewChart.setLayers(provider);
  // and trigger repaint
  _myOverviewChart.repaint();
}
/** no default focus target for this view */
@Override
public void setFocus()
{
  // TODO Auto-generated method stub
}
/**
 * sort out what we're listening to: track the active Layers,
 * TrackDataProvider and TimeProvider as editor parts open and close.
 */
private void watchMyParts()
{
  _myPartMonitor = new PartMonitor(getSite().getWorkbenchWindow()
      .getPartService());
  _myPartMonitor.addPartListener(Layers.class, PartMonitor.ACTIVATED,
      new PartMonitor.ICallback()
      {
        @Override
        public void eventTriggered(final String type, final Object part,
            final IWorkbenchPart parentPart)
        {
          final Layers provider = (Layers) part;
          // is this different to our current one?
          if (provider != _targetLayers)
          {
            // ok, start listening to the new one
            _targetLayers = provider;
            plotSelected(provider, parentPart);
          }
        }
      });
  _myPartMonitor.addPartListener(Layers.class, PartMonitor.CLOSED,
      new PartMonitor.ICallback()
      {
        @Override
        public void eventTriggered(final String type, final Object part,
            final IWorkbenchPart parentPart)
        {
          if (part == _targetLayers)
          {
            // cancel the listeners
            plotSelected(null, null);
            _targetLayers = null;
          }
        }
      });
  // we also need the primary/secondary track provider
  _myPartMonitor.addPartListener(TrackDataProvider.class,
      PartMonitor.ACTIVATED, new PartMonitor.ICallback()
      {
        @Override
        public void eventTriggered(final String type, final Object part,
            final IWorkbenchPart parentPart)
        {
          final TrackDataProvider provider = (TrackDataProvider) part;
          // is this different to our current one?
          if (provider != _trackDataProvider)
          {
            // ok, remember it
            _trackDataProvider = provider;
            // and trigger update
            _myOverviewChart.update();
          }
        }
      });
  _myPartMonitor.addPartListener(TrackDataProvider.class, PartMonitor.CLOSED,
      new PartMonitor.ICallback()
      {
        @Override
        public void eventTriggered(final String type, final Object part,
            final IWorkbenchPart parentPart)
        {
          final TrackDataProvider provider = (TrackDataProvider) part;
          // is this our current one?
          if (provider == _trackDataProvider)
          {
            // ok, drop it
            _trackDataProvider = null;
            // and refresh
            _myOverviewChart.update();
          }
        }
      });
  _myPartMonitor.addPartListener(TimeProvider.class, PartMonitor.ACTIVATED,
      new PartMonitor.ICallback()
      {
        @Override
        public void eventTriggered(final String type, final Object part,
            final IWorkbenchPart parentPart)
        {
          final TimeProvider provider = (TimeProvider) part;
          // is this different to our current one?
          if (provider != _timeProvider)
          {
            if (_timeProvider != null)
            {
              // ditch the old one
              _timeProvider.removeListener(_timeChangeListener,
                  TimeProvider.TIME_CHANGED_PROPERTY_NAME);
            }
            // ok, start listening to the new one
            _timeProvider = provider;
            _timeProvider.addListener(_timeChangeListener,
                TimeProvider.TIME_CHANGED_PROPERTY_NAME);
          }
        }
      });
  _myPartMonitor.addPartListener(TimeProvider.class, PartMonitor.CLOSED,
      new PartMonitor.ICallback()
      {
        @Override
        public void eventTriggered(final String type, final Object part,
            final IWorkbenchPart parentPart)
        {
          final TimeProvider provider = (TimeProvider) part;
          // is this our current one?
          if (provider == _timeProvider && _timeProvider != null)
          {
            // ditch the old one
            _timeProvider.removeListener(_timeChangeListener,
                TimeProvider.TIME_CHANGED_PROPERTY_NAME);
            // FIX: also drop the stale reference, matching the
            // TrackDataProvider CLOSED handler above - previously paintMe()
            // could keep calling getTime() on a closed provider
            _timeProvider = null;
          }
        }
      });
  // ok we're all ready now. just try and see if the current part is valid
  _myPartMonitor.fireActivePart(getSite().getWorkbenchWindow()
      .getActivePage());
}
}
|
package com.vip416.Test;
import android.app.Activity;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
/** Entry activity: hosts the main layout and a minimal options menu. */
public class MainActivity extends Activity {

    /** Standard bootstrap: inflate the main layout. */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }

    /** Populate the action bar from the XML menu definition. */
    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.main, menu);
        return true;
    }

    /**
     * Consume the settings item ourselves; everything else (including the
     * Home/Up button, given a parent activity in AndroidManifest.xml) is
     * deferred to the framework.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        final boolean isSettings = item.getItemId() == R.id.action_settings;
        return isSettings || super.onOptionsItemSelected(item);
    }
}
|
package pl.wurmonline.deedplanner;
import java.util.Random;
/** Application-wide constants shared across DeedPlanner. */
public class Constants {
    /** shared random number generator */
    public static final Random random = new Random();
    /** maximum number of building floors supported */
    public static final int FLOORS_LIMIT = 17;
    /** scale factor between stored heights and model units */
    public static final float HEIGHT_MOD = (35f/3f);
    /** platform line separator, cached once */
    public static final String ENTER = System.getProperty("line.separator");
    /** user-visible application version */
    public static final String VERSION_STRING = "DeedPlanner 2.3.6";

    // FIX: constants holder - prevent accidental instantiation
    private Constants() {
    }
}
|
package pl.wurmonline.deedplanner.data;
import java.util.*;
import java.util.Map.Entry;
import javax.media.opengl.GL2;
import org.w3c.dom.*;
import pl.wurmonline.deedplanner.*;
import pl.wurmonline.deedplanner.data.bridges.BridgePart;
import pl.wurmonline.deedplanner.data.storage.Data;
import pl.wurmonline.deedplanner.graphics.CameraType;
import pl.wurmonline.deedplanner.logic.Tab;
import pl.wurmonline.deedplanner.logic.TileFragment;
import pl.wurmonline.deedplanner.logic.symmetry.Symmetry;
import pl.wurmonline.deedplanner.util.*;
public final class Tile implements XMLSerializable {
// scratch transform used to shear walls on sloped terrain (see deform()).
// NOTE(review): shared static scratch state - assumed single render thread
private static final float[] deformMatrix = new float[] {1, 0, 0, 0,
                                                         0, 1, 0, 0,
                                                         0, 0, 1, 0,
                                                         0, 0, 0, 1};
// owning map and this tile's coordinates within it
private final Map map;
private final int x;
private final int y;
// surface height (stored as int, scaled by Constants.HEIGHT_MOD at render)
private int height = 0;
private Ground ground;
// everything placed on this tile, keyed by (floor, entity type)
private final HashMap<EntityData, TileEntity> entities;
private Label label;
private BridgePart bridgePart;
// cave-layer state (defaults: height 5, size 30, "sw" cave type)
private int caveHeight = 5;
private int caveSize = 30;
private CaveData cave = Data.caves.get("sw");
private Label caveLabel;
private BridgePart caveBridgePart;
/**
 * Deserialize a tile from its XML element.
 *
 * @param map the map this tile belongs to
 * @param x tile x coordinate
 * @param y tile y coordinate
 * @param tile the XML element describing this tile
 */
public Tile(Map map, int x, int y, Element tile) {
    this.map = map;
    this.x = x;
    this.y = y;
    // heights are stored as floats in the file but used as ints in memory
    height = (int) Float.parseFloat(tile.getAttribute("height"));
    // cave attributes are optional - keep the field defaults when absent
    if (!tile.getAttribute("caveHeight").equals("")) {
        caveHeight = (int) Float.parseFloat(tile.getAttribute("caveHeight"));
    }
    if (!tile.getAttribute("caveSize").equals("")) {
        caveSize = (int) Float.parseFloat(tile.getAttribute("caveSize"));
    }
    ground = new Ground((Element) tile.getElementsByTagName("ground").item(0));
    if (tile.getElementsByTagName("cave").getLength()!=0) {
        cave = CaveData.get((Element) tile.getElementsByTagName("cave").item(0));
    }
    // only accept labels that are direct children: getElementsByTagName is
    // recursive, and per-floor labels live under <level> elements instead
    NodeList labels = tile.getElementsByTagName("label");
    if (labels.getLength() != 0 && labels.item(0).getParentNode() == tile) {
        label = new Label((Element) labels.item(0));
    }
    NodeList caveLabels = tile.getElementsByTagName("caveLabel");
    if (caveLabels.getLength() != 0 && caveLabels.item(0).getParentNode() == tile) {
        caveLabel = new Label((Element) caveLabels.item(0));
    }
    entities = new HashMap<>();
    // entities are grouped per floor under <level value="..."> elements
    NodeList list = tile.getElementsByTagName("level");
    for (int i=0; i<list.getLength(); i++) {
        Element level = (Element) list.item(i);
        int floor = Integer.parseInt(level.getAttribute("value"));
        NodeList childNodes = level.getElementsByTagName("*");
        for (int i2=0; i2<childNodes.getLength(); i2++) {
            Element entity = (Element) childNodes.item(i2);
            switch (entity.getNodeName().toLowerCase()) {
                case "floor":
                    entities.put(new EntityData(floor, EntityType.FLOORROOF), new Floor(entity));
                    break;
                case "hwall":
                    // walls on the far map edge have no tile face to sit on
                    // (the continue skips to the next child element)
                    if (x == map.getWidth()) {
                        Log.out(this, "Detected wall on the edge of visible area, deleting");
                        continue;
                    }
                    Wall hwall = new Wall(entity);
                    if (hwall.data.houseWall) {
                        entities.put(new EntityData(floor, EntityType.HWALL), hwall);
                    }
                    else {
                        entities.put(new EntityData(floor, EntityType.HFENCE), hwall);
                    }
                    break;
                case "vwall":
                    if (y == map.getHeight()) {
                        Log.out(this, "Detected wall on the edge of visible area, deleting");
                        continue;
                    }
                    Wall vwall = new Wall(entity);
                    if (vwall.data.houseWall) {
                        entities.put(new EntityData(floor, EntityType.VWALL), vwall);
                    }
                    else {
                        entities.put(new EntityData(floor, EntityType.VFENCE), vwall);
                    }
                    break;
                // borders are always stored at floor 0
                case "hborder":
                    entities.put(new EntityData(0, EntityType.HBORDER), BorderData.get(entity));
                    break;
                case "vborder":
                    entities.put(new EntityData(0, EntityType.VBORDER), BorderData.get(entity));
                    break;
                case "roof":
                    entities.put(new EntityData(floor, EntityType.FLOORROOF), new Roof(entity));
                    break;
                case "object":
                    if (x == map.getWidth() || y == map.getHeight()) {
                        Log.out(this, "Detected object on the edge of visible area, deleting");
                        continue;
                    }
                    ObjectLocation loc = ObjectLocation.parse(entity.getAttribute("position"));
                    // an "age" attribute distinguishes animals from objects
                    if (entity.getAttribute("age").equals("")) {
                        entities.put(new ObjectEntityData(floor, loc), new GameObject(entity));
                    }
                    else {
                        entities.put(new ObjectEntityData(floor, loc), new Animal(entity));
                    }
                    break;
                case "cave":
                    cave = CaveData.get(entity);
                    break;
                case "label":
                    entities.put(new EntityData(floor, EntityType.LABEL), new Label(entity));
                    break;
            }
        }
    }
}
/**
 * Create a fresh, empty tile at the given coordinates, defaulting to the
 * "gr" ground type when ground data has been loaded.
 */
public Tile(Map map, int x, int y) {
    this.map = map;
    this.x = x;
    this.y = y;
    this.entities = new HashMap<>();
    if (!Data.grounds.isEmpty()) {
        this.ground = new Ground(Data.grounds.get("gr"));
    }
}
/** Copy constructor: duplicate the tile in the same map, same position. */
public Tile(Tile tile) {
    this(tile.map, tile, tile.x, tile.y);
}
/**
 * Copy a tile's state into a new tile at the given map/position, deep
 * copying the entity map so the copy does not share mutable entities with
 * the source.
 *
 * @param map the map the new tile belongs to
 * @param tile the tile to copy state from
 * @param x new tile x coordinate
 * @param y new tile y coordinate
 */
public Tile(Map map, Tile tile, int x, int y) {
    this.map = map;
    this.x = x;
    this.y = y;
    this.height = tile.height;
    this.caveHeight = tile.caveHeight;
    this.caveSize = tile.caveSize;
    this.ground = tile.ground;
    this.cave = tile.cave;
    this.label = tile.label;
    this.caveLabel = tile.caveLabel;
    this.bridgePart = tile.bridgePart;
    // FIX: the deep-copied map was previously built and then discarded in
    // favour of a raw-typed shallow copy of tile.entities, so copied tiles
    // shared entity instances with the source tile
    HashMap<EntityData, TileEntity> copiedEntities = new HashMap<>();
    for (Entry<EntityData, TileEntity> entrySet : tile.entities.entrySet()) {
        copiedEntities.put(entrySet.getKey(), entrySet.getValue().deepCopy());
    }
    this.entities = copiedEntities;
}
/**
 * Render this tile in the 3d view: the terrain layer matching the current
 * floor (skipped for edge tiles), then everything placed on the tile.
 */
public void render3d(GL2 g, boolean edge) {
    if (!edge) {
        final boolean aboveGround = Globals.floor >= 0;
        if (aboveGround) {
            renderGround(g);
        } else {
            renderUnderground(g);
        }
    }
    renderEntities(g);
}
/** render the surface layer: optional bridge part, then tinted ground */
private void renderGround(GL2 g) {
    if (bridgePart != null && (Globals.renderBridgesEditing || Globals.camera.getCameraType() == CameraType.SPECTATOR)) {
        g.glColor3f(1, 1, 1);
        bridgePart.render(g, this);
    }
    float lightModifier = Globals.camera.getLevelVisibility(0);
    float heightR = 1;
    float heightG = 1;
    float heightB = 1;
    // in height mode, tint flat tiles green (blue in colorblind mode) and
    // sloped tiles red
    boolean renderColors = (Globals.renderHeight || Globals.tab == Tab.height);
    if (renderColors) {
        if (isFlat()) {
            if (!pl.wurmonline.deedplanner.Properties.colorblind) {
                heightR = 0.8f;
                heightG = 1.0f;
                heightB = 0.8f;
            }
            else {
                heightR = 0.8f;
                heightG = 0.8f;
                heightB = 1.0f;
            }
        }
        else {
            heightR = 1.0f;
            heightG = 0.8f;
            heightB = 0.8f;
        }
    }
    // only touch the GL colour when a tint or dimming actually applies
    if (lightModifier < 1 || renderColors) {
        g.glColor3f(lightModifier * heightR, lightModifier * heightG, lightModifier * heightB);
    }
    ground.render(g, this);
}
/** render every entity on this tile that belongs to a visible floor */
private void renderEntities(GL2 g) {
    for (Entry<EntityData, TileEntity> e : entities.entrySet()) {
        EntityData key = e.getKey();
        final int floor = key.getFloor();
        // only draw entities on the same side of the ground as the camera
        if (!MathUtils.isSameSign(floor, Globals.floor)) {
            continue;
        }
        float colorMod = Globals.camera.getLevelVisibility(floor);
        if (colorMod == 0) {
            continue;
        }
        TileEntity entity = e.getValue();
        // isolate each entity's transform, and restore plain white after
        g.glPushMatrix();
        renderEntity(g, key, entity, colorMod);
        g.glPopMatrix();
        g.glColor3f(1, 1, 1);
    }
}
/**
 * render a single entity, translated/tinted according to its kind, floor
 * and position on the tile.
 */
private void renderEntity(GL2 g, EntityData data, TileEntity entity, float colorMod) {
    int floor = data.getFloor();
    int floorOffset = getFloorOffset(floor);
    switch (data.getType()) {
        case FLOORROOF:
            g.glTranslatef(4, 0, floorOffset + getFloorHeight()/Constants.HEIGHT_MOD);
            g.glColor3f(colorMod, colorMod, colorMod);
            entity.render(g, this);
            break;
        case VWALL: case VFENCE:
            float verticalWallHeight = getVerticalWallHeight();
            float verticalWallHeightDiff = getVerticalWallHeightDiff();
            renderWall(g, (Wall) entity, floorOffset, colorMod, verticalWallHeight, verticalWallHeightDiff, true);
            break;
        case HWALL: case HFENCE:
            float horizontalWallHeight = getHorizontalWallHeight();
            float horizontalWallHeightDiff = getHorizontalWallHeightDiff();
            renderWall(g, (Wall) entity, floorOffset, colorMod, horizontalWallHeight, horizontalWallHeightDiff, false);
            break;
        case OBJECT:
            ObjectEntityData objData = (ObjectEntityData) data;
            ObjectLocation loc = objData.getLocation();
            // lift the object onto the floor model when one exists below it
            boolean onAbovegroundFloor = objData.getFloor() > 0 || this.getTileContent(0) != null;
            boolean onUndergroundFloor = objData.getFloor() < -1 || this.getTileContent(-1) != null;
            if (onAbovegroundFloor || onUndergroundFloor) {
                g.glTranslatef(0, 0, Constants.FLOOR_MODEL_HEIGHT);
            }
            if (entity instanceof GameObject) {
                GameObject obj = (GameObject) entity;
                GameObjectData goData = obj.getData();
                // trees can be toggled off separately per camera mode
                boolean isTree = goData.type.equals(Constants.TREE_TYPE);
                boolean treeRenderingAllowed = (Globals.renderTreesEditing && Globals.camera.isEditing()) || (Globals.renderTreesSpectating && !Globals.camera.isEditing());
                if (!isTree || (isTree && treeRenderingAllowed)) {
                    g.glColor3f(colorMod, colorMod, colorMod);
                    final float renderHeight;
                    // floating objects never render below water level (0)
                    if (obj.getData().floating) {
                        renderHeight = Math.max(0, floorOffset + getHeight(loc.getHorizontalAlign()/4f, loc.getVerticalAlign()/4f)/Constants.HEIGHT_MOD);
                    }
                    else {
                        renderHeight = floorOffset + getHeight(loc.getHorizontalAlign()/4f, loc.getVerticalAlign()/4f)/Constants.HEIGHT_MOD;
                    }
                    g.glTranslatef(loc.getHorizontalAlign(), loc.getVerticalAlign(), renderHeight);
                    obj.render(g, this);
                }
            }
            else if (entity instanceof Animal) {
                Animal animal = (Animal) entity;
                animal.getTintColor().use(g, colorMod);
                g.glTranslatef(loc.getHorizontalAlign(), loc.getVerticalAlign(), floorOffset + getHeight(loc.getHorizontalAlign()/4f, loc.getVerticalAlign()/4f)/Constants.HEIGHT_MOD);
                animal.render(g, this);
            }
            break;
    }
}
/**
 * Vertical offset (in model units, 3 per floor) of the given floor;
 * underground floors (-1, -2, ...) mirror the above-ground numbering.
 */
private int getFloorOffset(int floor) {
    final int level = (floor < 0) ? (-floor - 1) : floor;
    return 3 * level;
}
/**
 * render a wall or fence: elevate it to its floor, rotate vertical walls,
 * and shear it to follow the terrain slope.
 */
private void renderWall(GL2 g, Wall wall, int floorOffset, float colorMod, float wallElevation, float wallHeightDiff, boolean isVertical) {
    g.glTranslatef(0, 0, floorOffset + wallElevation / Constants.HEIGHT_MOD);
    if (isVertical) {
        g.glRotatef(90, 0, 0, 1);
    }
    // shear factor derived from the height difference across the wall
    float diff = wallHeightDiff / 47f;
    if (diff<0) {
        g.glTranslatef(0, 0, -diff*4f);
    }
    deform(g, diff);
    // top-view shows walls colour-coded; other views draw them untinted
    if (Globals.camera.getCameraType() == CameraType.TOP_VIEW) {
        wall.data.color.use(g, colorMod);
    }
    else {
        g.glColor3f(1, 1, 1);
    }
    wall.render(g, this);
    g.glColor3f(1, 1, 1);
}
/**
 * Render the cave layer of this tile: optional cave bridge part, then the
 * cave geometry, shaded in spectator mode so walls stand out.
 */
private void renderUnderground(GL2 g) {
    final boolean showBridge = caveBridgePart != null
            && (Globals.renderBridgesEditing || Globals.camera.isEditing());
    if (showBridge) {
        g.glColor3f(1, 1, 1);
        caveBridgePart.render(g, this);
    }
    // pick the grey level: full brightness outside spectator mode,
    // lighter for cave walls than for open cave in spectator mode
    final float shade;
    if (Globals.camera.getCameraType() != CameraType.SPECTATOR) {
        shade = 1f;
    } else if (cave.wall) {
        shade = 0.9f;
    } else {
        shade = 0.7f;
    }
    g.glColor3f(shade, shade, shade);
    cave.render(g, this);
}
/** render this tile's border overlays in the flat (2d) view */
public void render2d(GL2 g) {
    for (Entry<EntityData, TileEntity> e : entities.entrySet()) {
        EntityData key = e.getKey();
        TileEntity entity = e.getValue();
        g.glPushMatrix();
        switch (key.getType()) {
            case VBORDER:
                // vertical borders reuse the horizontal model, rotated 90
                g.glRotatef(90, 0, 0, 1);
                BorderData vBorder = (BorderData) entity;
                // borders are editing aids - hidden outside editing mode
                if (Globals.camera.isEditing()) {
                    vBorder.render(g, this);
                }
                g.glColor3f(1, 1, 1);
                break;
            case HBORDER:
                BorderData hBorder = (BorderData) entity;
                if (Globals.camera.isEditing()) {
                    hBorder.render(g, this);
                }
                g.glColor3f(1, 1, 1);
                break;
        }
        g.glPopMatrix();
        g.glColor3f(1, 1, 1);
    }
}
/**
 * Height used for floors/roofs on this tile: the highest of the four tile
 * corners, with missing neighbours counting as height 0.
 */
private float getFloorHeight() {
    float highest = getCurrentLayerHeight();
    final int[][] neighbours = { {1, 0}, {0, 1}, {1, 1} };
    for (final int[] offset : neighbours) {
        final Tile neighbour = getMap().getTile(this, offset[0], offset[1]);
        final float cornerHeight =
                (neighbour != null) ? neighbour.getCurrentLayerHeight() : 0;
        highest = Math.max(highest, cornerHeight);
    }
    return highest;
}
private float getVerticalWallHeight() {
return Math.min(getCurrentLayerHeight(), getMap().getTile(this, 0, 1).getCurrentLayerHeight());
}
private float getVerticalWallHeightDiff() {
return getMap().getTile(this, 0, 1).getCurrentLayerHeight() - getCurrentLayerHeight();
}
private float getHorizontalWallHeight() {
return Math.min(getCurrentLayerHeight(), getMap().getTile(this, 1, 0).getCurrentLayerHeight());
}
private float getHorizontalWallHeightDiff() {
return getMap().getTile(this, 1, 0).getCurrentLayerHeight() - getCurrentLayerHeight();
}
private void deform(GL2 g, float scale) {
deformMatrix[2] = scale;
g.glMultMatrixf(deformMatrix, 0);
}
/**
 * Draws a pulsing yellow selection quad over this tile, but only in tabs
 * where tile selection is meaningful.
 */
public void renderSelection(GL2 g) {
    if ((Globals.tab == Tab.labels || Globals.tab == Tab.height || Globals.tab == Tab.symmetry || Globals.tab == Tab.bridges)) {
        g.glDisable(GL2.GL_ALPHA_TEST);
        g.glEnable(GL2.GL_BLEND);
        g.glBlendFunc(GL2.GL_SRC_ALPHA, GL2.GL_ONE_MINUS_SRC_ALPHA);
        // Triangle-wave pulse with a 2 second period, derived from wall-clock time:
        // alpha oscillates between 0.1 and 0.3.
        double color = System.currentTimeMillis();
        color%=2000d; color-=1000d; color = Math.abs(color); color/=1000d;
        g.glColor4d(1, 1, 0, 0.1d+0.2d*color);
        g.glBegin(GL2.GL_QUADS);
        // Tile spans 4x4 world units.
        g.glVertex2f(0, 0);
        g.glVertex2f(0, 4);
        g.glVertex2f(4, 4);
        g.glVertex2f(4, 0);
        g.glEnd();
        // Restore GL state for subsequent rendering.
        g.glColor4f(1, 1, 1, 1);
        g.glDisable(GL2.GL_BLEND);
        g.glEnable(GL2.GL_ALPHA_TEST);
    }
}
/**
 * Renders symmetry guides for this tile: either per-tile markers or
 * axis border lines when this tile sits on a locked symmetry axis.
 */
public void renderSymmetry(GL2 g) {
    Symmetry sym = getMap().getSymmetry();
    switch(sym.getType()) {
        case TILE:
            sym.renderTiles(g);
            break;
        case BORDER:
        case CORNER:
            // Only draw an axis line when this tile lies on it and that axis is locked.
            if(getX() == sym.getX() && Globals.xSymLock)
                sym.renderXBorder(g);
            if(getY() == sym.getY() && Globals.ySymLock)
                sym.renderYBorder(g);
            break;
    }
}
/** Renders this tile's surface label, if one is set. */
public void renderLabel(GL2 g) {
    if (label == null) {
        return;
    }
    label.render(g, this);
}

/** Renders this tile's cave label, if one is set. */
public void renderCaveLabel(GL2 g) {
    if (caveLabel == null) {
        return;
    }
    caveLabel.render(g, this);
}
/**
 * Serializes this tile into a {@code <tile>} element appended to {@code root}:
 * position/height attributes, ground, cave, optional labels, and one
 * {@code <level>} element per floor containing that floor's entities.
 */
public void serialize(Document doc, Element root) {
    final Element tile = doc.createElement("tile");
    tile.setAttribute("x", Integer.toString(x));
    tile.setAttribute("y", Integer.toString(y));
    tile.setAttribute("height", Float.toString(height));
    tile.setAttribute("caveHeight", Float.toString(caveHeight));
    tile.setAttribute("caveSize", Float.toString(caveSize));
    root.appendChild(tile);
    ground.serialize(doc, tile);
    cave.serialize(doc, tile);
    if (label!=null) {
        label.serialize(doc, tile, false);
    }
    if (caveLabel!=null) {
        caveLabel.serialize(doc, tile, true);
    }
    // Lazily create one <level> element per floor that actually holds entities.
    final HashMap<Integer, Element> levels = new HashMap<>();
    for (Entry<EntityData, TileEntity> e : entities.entrySet()) {
        final EntityData key = e.getKey();
        final TileEntity entity = e.getValue();
        final Element level = levels.computeIfAbsent(key.getFloor(), floor -> {
            Element created = doc.createElement("level");
            created.setAttribute("value", Integer.toString(floor));
            tile.appendChild(created);
            return created;
        });
        switch (key.getType()) {
            case FLOORROOF:
                // Floors/roofs serialize themselves directly under <level>.
                entity.serialize(doc, level);
                break;
            case HWALL: case HFENCE:
                serializeWrapped(doc, level, "hWall", entity);
                break;
            case VWALL: case VFENCE:
                serializeWrapped(doc, level, "vWall", entity);
                break;
            case HBORDER:
                serializeWrapped(doc, level, "hBorder", entity);
                break;
            case VBORDER:
                serializeWrapped(doc, level, "vBorder", entity);
                break;
            case OBJECT:
                ObjectEntityData objectData = (ObjectEntityData) key;
                Element objectElement = doc.createElement("object");
                objectElement.setAttribute("position", objectData.getLocation().toString());
                entity.serialize(doc, objectElement);
                level.appendChild(objectElement);
                break;
            case LABEL:
                entity.serialize(doc, level);
                break;
        }
    }
}

/** Serializes an entity inside a freshly created wrapper element under {@code level}. */
private void serializeWrapped(Document doc, Element level, String tag, TileEntity entity) {
    Element wrapper = doc.createElement(tag);
    entity.serialize(doc, wrapper);
    level.appendChild(wrapper);
}
/** Releases GL resources held by this tile's ground. */
public void destroy(GL2 g) {
    ground.destroy(g);
}

/**
 * Finds the floor on which the given entity instance sits (identity match).
 *
 * @throws DeedPlannerRuntimeException if the entity is not on this tile
 */
protected int getEntityFloor(TileEntity entity) {
    return entities.entrySet().stream()
            .filter(entry -> entry.getValue() == entity)
            .map(entry -> entry.getKey().getFloor())
            .findFirst()
            .orElseThrow(() -> new DeedPlannerRuntimeException("Cannot find entity: " + entity));
}
/** Returns the map this tile belongs to. */
public Map getMap() {
    return map;
}
/** Returns the ground (terrain type) of this tile. */
public Ground getGround() {
    return ground;
}
/** Sets the ground with an explicit road direction, recording undo. */
public void setGround(GroundData data, RoadDirection dir) {
    setGround(data, dir, true);
}
/** Sets the ground using the globally selected road direction, recording undo. */
public void setGround(GroundData data) {
    setGround(data, Globals.roadDirection, true);
}
/**
 * Sets the ground type of this tile.
 *
 * @param data ground type; a null value is logged and ignored
 * @param dir  road direction; forced to CENTER for non-diagonal grounds
 * @param undo whether to record an undo snapshot
 */
void setGround(GroundData data, RoadDirection dir, boolean undo) {
    if (data == null) {
        Log.out(this, "Attempt to set ground with null GroundData");
        return;
    }
    // No-op when the ground would not actually change — avoids spurious undo entries.
    if (!new Ground(data).equals(ground)) {
        Tile oldTile = new Tile(this);
        if (!data.diagonal) {
            dir = RoadDirection.CENTER;
        }
        ground = new Ground(data, dir);
        if (undo) {
            map.addUndo(this, oldTile);
        }
    }
}
/** Returns this tile's X coordinate on the map. */
public int getX() {
    return x;
}
/** Returns this tile's Y coordinate on the map. */
public int getY() {
    return y;
}
/**
 * Height-tool entry point: dispatches to surface height, cave height or
 * cave size depending on the current floor and edit mode.
 */
public void setHeight(int height) {
    if (Globals.floor>=0) {
        setHeight(height, true);
    }
    else if (!Globals.editSize) {
        setCaveHeight(height, true);
    }
    else {
        setCaveSize(height, true);
    }
}
/**
 * Sets the surface height of this tile's corner, recalculates map heights,
 * and invalidates the four tiles sharing this corner.
 *
 * @param height new corner height
 * @param undo   whether to record an undo snapshot; also triggers bridge
 *               destruction on affected tiles (bridges cannot span changed terrain)
 */
void setHeight(int height, boolean undo) {
    if (this.height!=height) {
        Tile oldTile = new Tile(this);
        this.height = height;
        map.recalculateHeight();
        // The corner is shared by this tile and its west/south neighbors —
        // mark all four grounds dirty so they re-mesh.
        for (int i = -1; i <= 0; i++) {
            for (int i2 = -1; i2 <= 0; i2++) {
                Tile tile = map.getTile(this, i, i2);
                if (tile != null) {
                    tile.getGround().markDirty();
                }
            }
        }
        if (undo) {
            map.addUndo(this, oldTile);
            // NOTE(review): bridge destruction only happens on the undo-recording
            // path — presumably intentional (programmatic edits keep bridges); confirm.
            for (int i = -1; i <= 0; i++) {
                for (int i2 = -1; i2 <= 0; i2++) {
                    Tile tile = map.getTile(this, i, i2);
                    if (tile != null) {
                        tile.destroyBridge();
                    }
                }
            }
        }
    }
}
/**
 * Bilinearly interpolates the current-layer height inside this tile.
 *
 * @param xPart fractional position across the tile in X, in [0, 1]
 * @param yPart fractional position across the tile in Y, in [0, 1]
 * @return interpolated height; corners beyond the map edge count as 0
 */
public float getHeight(final float xPart, final float yPart) {
    final float xRemainder = 1f - xPart;
    final float yRemainder = 1f - yPart;
    // Corner heights; edge-of-map corners default to 0.
    final float h00 = getCurrentLayerHeight();
    final float h10 = x!=map.getWidth() ? map.getTile(this, 1, 0).getCurrentLayerHeight() : 0;
    final float h01 = y!=map.getHeight() ? map.getTile(this, 0, 1).getCurrentLayerHeight() : 0;
    final float h11 = (x!=map.getWidth() && y!=map.getHeight()) ? map.getTile(this, 1, 1).getCurrentLayerHeight() : 0;
    // Interpolate along X on both edges, then along Y between them.
    final float southEdge = (h00*xRemainder + h10*xPart);
    final float northEdge = (h01*xRemainder + h11*xPart);
    return (southEdge*yRemainder + northEdge*yPart);
}

/** Returns the surface height of this tile's corner. */
public int getHeight() {
    return height;
}

/**
 * Sets the cave-layer height of this tile's corner.
 *
 * @param undo whether to record an undo snapshot
 */
void setCaveHeight(int height, boolean undo) {
    if (this.caveHeight == height) {
        return;
    }
    Tile previousState = new Tile(this);
    this.caveHeight = height;
    map.recalculateHeight();
    if (undo) {
        map.addUndo(this, previousState);
    }
}

/** Returns the cave-layer height of this tile's corner. */
public int getCaveHeight() {
    return caveHeight;
}

/** Height on whichever layer is currently being edited (cave below floor 0). */
public int getCurrentLayerHeight() {
    return Globals.floor < 0 ? caveHeight : height;
}
/**
 * Sets the cave ceiling size, clamped to the valid [30, 300] range
 * (out-of-range values are ignored).
 *
 * @param undo whether to record an undo snapshot
 */
void setCaveSize(int size, boolean undo) {
    if (size < 30 || size > 300) {
        return;
    }
    if (this.caveSize == size) {
        return;
    }
    Tile previousState = new Tile(this);
    this.caveSize = size;
    map.recalculateHeight();
    if (undo) {
        map.addUndo(this, previousState);
    }
}

/** Returns the cave ceiling size of this tile. */
public int getCaveSize() {
    return caveSize;
}

/** Sets the cave type of this tile, recording undo. */
public void setCaveEntity(CaveData entity) {
    setCaveEntity(entity, true);
}

/** Sets the cave type; no-op when unchanged (reference comparison). */
void setCaveEntity(CaveData entity, boolean undo) {
    if (this.cave == entity) {
        return;
    }
    Tile previousState = new Tile(this);
    this.cave = entity;
    if (undo) {
        map.addUndo(this, previousState);
    }
}

/** Returns the cave type of this tile. */
public CaveData getCaveEntity() {
    return cave;
}

/**
 * Returns true when all four corners of this tile share the same
 * current-layer height. Tiles at the map border are never flat.
 */
public boolean isFlat() {
    final int ownHeight = getCurrentLayerHeight();
    final int[][] cornerOffsets = {{1, 1}, {1, 0}, {0, 1}};
    for (int[] offset : cornerOffsets) {
        Tile neighbor = map.getTile(this, offset[0], offset[1]);
        if (neighbor == null || neighbor.getCurrentLayerHeight() != ownHeight) {
            return false;
        }
    }
    return true;
}
/** Sets the floor/roof content of the given level, recording undo. */
public void setTileContent(TileEntity entity, int level) {
    setTileContent(entity, level, true);
}
/**
 * Sets or removes (entity == null) the floor/roof content of a level.
 * Placement requires a flat tile; removal is always allowed.
 *
 * @param undo whether to record an undo snapshot
 */
void setTileContent(TileEntity entity, int level, boolean undo) {
    if (!isFlat() && entity != null) {
        return;
    }
    final EntityData entityData = new EntityData(level, EntityType.FLOORROOF);
    // Reference comparison: only act when the stored entity actually changes.
    if (entity!=entities.get(entityData)) {
        Tile oldTile = new Tile(this);
        if (entity!=null) {
            entities.put(entityData, entity);
        }
        else {
            entities.remove(entityData);
        }
        if (undo) {
            map.addUndo(this, oldTile);
        }
    }
}
/** Returns the floor/roof content of the given level, or null. */
public TileEntity getTileContent(int level) {
    return entities.get(new EntityData(level, EntityType.FLOORROOF));
}
/** Sets the wall/fence on this tile's horizontal (south) edge, recording undo. */
public void setHorizontalWall(WallData wall, int level) {
    setHorizontalWall(wall, level, true);
}
/**
 * Sets or removes (wall == null) the wall/fence on the horizontal edge.
 * House walls require this tile or its south neighbor to be flat.
 * Walls and fences occupy separate slots; placing one removes the other
 * unless an arch/arch-buildable combination allows coexistence.
 *
 * @param undo whether to record an undo snapshot
 */
void setHorizontalWall(WallData wall, int level, boolean undo) {
    if (wall!=null && wall.houseWall) {
        if (!(isFlat() || (map.getTile(this, 0, -1)!=null && map.getTile(this, 0, -1).isFlat()))) {
            return;
        }
    }
    final EntityData entityData;
    if (wall!=null && wall.houseWall) {
        entityData = new EntityData(level, EntityType.HWALL);
    }
    else if (wall!=null) {
        entityData = new EntityData(level, EntityType.HFENCE);
    }
    else {
        entityData = null;
    }
    final EntityData wallEntity = new EntityData(level, EntityType.HWALL);
    final EntityData fenceEntity = new EntityData(level, EntityType.HFENCE);
    final Wall currentWall = (Wall) entities.get(wallEntity);
    final Wall currentFence = (Wall) entities.get(fenceEntity);
    boolean reversed;
    if (wall!=null && wall.houseWall) {
        if (Globals.autoReverseWall) {
            // Auto-reverse: face the wall toward the side that has floor content.
            // Bug fix: the south neighbor can be null here (the guard above allows
            // it when THIS tile is flat, e.g. at y == 0) — previously this NPEd.
            final Tile south = map.getTile(this, 0, -1);
            reversed = getTileContent(level)!=null && (south==null || south.getTileContent(level)==null);
            if (!reversed) {
                reversed = Globals.reverseWall;
            }
        }
        else {
            reversed = Globals.reverseWall;
        }
    }
    else {
        reversed = false;
    }
    // Only act when the stored wall actually changes.
    if (!(new Wall(wall, reversed).equals(entities.get(entityData)))) {
        Tile oldTile = new Tile(this);
        if (wall!=null) {
            entities.put(entityData, new Wall(wall, reversed));
            if (wall.houseWall && !(wall.arch && currentFence!=null && currentFence.data.archBuildable)) {
                entities.remove(fenceEntity);
            }
            else if (!wall.houseWall && !(wall.archBuildable && currentWall!=null && currentWall.data.arch)) {
                entities.remove(wallEntity);
            }
        }
        else {
            entities.remove(wallEntity);
            entities.remove(fenceEntity);
        }
        if (undo) {
            map.addUndo(this, oldTile);
        }
    }
}
/** Returns the house wall on the horizontal edge of the given level, or null. */
public Wall getHorizontalWall(int level) {
    return (Wall) entities.get(new EntityData(level, EntityType.HWALL));
}

/** Returns the fence on the horizontal edge of the given level, or null. */
public Wall getHorizontalFence(int level) {
    return (Wall) entities.get(new EntityData(level, EntityType.HFENCE));
}

/** Removes horizontal walls and fences from every floor. */
public void clearHorizontalWalls() {
    for (int floor = 0; floor < Constants.FLOORS_LIMIT; floor++) {
        clearHorizontalWalls(floor);
    }
}

/** Removes the horizontal wall and fence from a single floor. */
public void clearHorizontalWalls(int level) {
    entities.remove(new EntityData(level, EntityType.HWALL));
    entities.remove(new EntityData(level, EntityType.HFENCE));
}
/** Sets the wall/fence on this tile's vertical (west) edge, recording undo. */
public void setVerticalWall(WallData wall, int level) {
    setVerticalWall(wall, level, true);
}
/**
 * Sets or removes (wall == null) the wall/fence on the vertical edge.
 * House walls require this tile or its west neighbor to be flat.
 * Walls and fences occupy separate slots; placing one removes the other
 * unless an arch/arch-buildable combination allows coexistence.
 *
 * @param undo whether to record an undo snapshot
 */
void setVerticalWall(WallData wall, int level, boolean undo) {
    if (wall!=null && wall.houseWall) {
        // Bug fix: guard the west neighbor against null (x == 0), matching the
        // null check the horizontal counterpart already performs.
        if (!(isFlat() || (map.getTile(this, -1, 0)!=null && map.getTile(this, -1, 0).isFlat()))) {
            return;
        }
    }
    final EntityData entityData;
    if (wall!=null && wall.houseWall) {
        entityData = new EntityData(level, EntityType.VWALL);
    }
    else if (wall!=null) {
        entityData = new EntityData(level, EntityType.VFENCE);
    }
    else {
        entityData = null;
    }
    final EntityData wallEntity = new EntityData(level, EntityType.VWALL);
    final EntityData fenceEntity = new EntityData(level, EntityType.VFENCE);
    final Wall currentWall = (Wall) entities.get(wallEntity);
    final Wall currentFence = (Wall) entities.get(fenceEntity);
    boolean reversed;
    if (wall!=null && wall.houseWall) {
        if (Globals.autoReverseWall) {
            // Auto-reverse: face the wall toward the side that has floor content.
            // Bug fix: the west neighbor can be null here — previously this NPEd.
            final Tile west = map.getTile(this, -1, 0);
            reversed = getTileContent(level)==null && west!=null && west.getTileContent(level)!=null;
            if (!reversed) {
                reversed = Globals.reverseWall;
            }
        }
        else {
            reversed = Globals.reverseWall;
        }
    }
    else {
        reversed = false;
    }
    // Only act when the stored wall actually changes.
    if (!(new Wall(wall, reversed).equals(entities.get(entityData)))) {
        Tile oldTile = new Tile(this);
        if (wall!=null) {
            entities.put(entityData, new Wall(wall, reversed));
            if (wall.houseWall && !(wall.arch && currentFence!=null && currentFence.data.archBuildable)) {
                entities.remove(fenceEntity);
            }
            else if (!wall.houseWall && !(wall.archBuildable && currentWall!=null && currentWall.data.arch)) {
                entities.remove(wallEntity);
            }
        }
        else {
            entities.remove(wallEntity);
            entities.remove(fenceEntity);
        }
        if (undo) {
            map.addUndo(this, oldTile);
        }
    }
}
/** Returns the house wall on the vertical edge of the given level, or null. */
public Wall getVerticalWall(int level) {
    return (Wall) entities.get(new EntityData(level, EntityType.VWALL));
}

/** Returns the fence on the vertical edge of the given level, or null. */
public Wall getVerticalFence(int level) {
    return (Wall) entities.get(new EntityData(level, EntityType.VFENCE));
}

/** Removes vertical walls and fences from every floor. */
public void clearVerticalWalls() {
    for (int floor = 0; floor < Constants.FLOORS_LIMIT; floor++) {
        clearVerticalWalls(floor);
    }
}

/** Removes the vertical wall and fence from a single floor. */
public void clearVerticalWalls(int level) {
    entities.remove(new EntityData(level, EntityType.VWALL));
    entities.remove(new EntityData(level, EntityType.VFENCE));
}
/** Sets the border on the horizontal edge, recording undo. */
public void setHorizontalBorder(BorderData border) {
    setHorizontalBorder(border, true);
}

/** Sets or removes (border == null) the horizontal border. */
void setHorizontalBorder(BorderData border, boolean undo) {
    setBorder(EntityType.HBORDER, border, undo);
}

/** Returns the border on the horizontal edge, or null. */
public BorderData getHorizontalBorder() {
    return (BorderData) entities.get(new EntityData(0, EntityType.HBORDER));
}

/** Sets the border on the vertical edge, recording undo. */
public void setVerticalBorder(BorderData border) {
    setVerticalBorder(border, true);
}

/** Sets or removes (border == null) the vertical border. */
void setVerticalBorder(BorderData border, boolean undo) {
    setBorder(EntityType.VBORDER, border, undo);
}

/** Returns the border on the vertical edge, or null. */
public BorderData getVerticalBorder() {
    return (BorderData) entities.get(new EntityData(0, EntityType.VBORDER));
}

/**
 * Shared implementation for both border edges. Borders always live on
 * floor 0; a no-op when the stored border is the same instance.
 */
private void setBorder(EntityType type, BorderData border, boolean undo) {
    final EntityData entityData = new EntityData(0, type);
    if (border == entities.get(entityData)) {
        return;
    }
    Tile previousState = new Tile(this);
    if (border == null) {
        entities.remove(entityData);
    }
    else {
        entities.put(entityData, border);
    }
    if (undo) {
        map.addUndo(this, previousState);
    }
}
/** Sets or removes (label == null) the per-floor label, recording undo. */
public void setFloorLabel(int floor, Label label) {
    setFloorLabel(floor, label, true);
}
/**
 * Sets or removes (label == null) the label of a specific floor.
 * NOTE(review): unlike the other setters, this records undo even when the
 * label is unchanged — presumably harmless, but confirm.
 */
void setFloorLabel(int floor, Label label, boolean undo) {
    Tile oldTile = new Tile(this);
    EntityData entityData = new EntityData(floor, EntityType.LABEL);
    if (label != null) {
        entities.put(entityData, label);
    }
    else {
        entities.remove(entityData);
    }
    if (undo) {
        map.addUndo(this, oldTile);
    }
}
/** Returns the label of a specific floor, or null. */
public Label getFloorLabel(int floor) {
    return (Label) entities.get(new EntityData(floor, EntityType.LABEL));
}
/** Sets the floor-independent surface label, recording undo. */
public void setGlobalSurfaceLabel(Label label) {
    setGlobalSurfaceLabel(label, true);
}
/** Sets the floor-independent surface label (null clears it). */
void setGlobalSurfaceLabel(Label label, boolean undo) {
    Tile oldTile = new Tile(this);
    this.label = label;
    if (undo) {
        map.addUndo(this, oldTile);
    }
}
/** Returns the floor-independent surface label, or null. */
public Label getGlobalSurfaceLabel() {
    return label;
}
/** Sets the cave-layer label, recording undo. */
public void setGlobalCaveLabel(Label caveLabel) {
    setGlobalCaveLabel(caveLabel, true);
}
/** Sets the cave-layer label (null clears it). */
void setGlobalCaveLabel(Label caveLabel, boolean undo) {
    Tile oldTile = new Tile(this);
    this.caveLabel = caveLabel;
    if (undo) {
        map.addUndo(this, oldTile);
    }
}
/** Returns the cave-layer label, or null. */
public Label getGlobalCaveLabel() {
    return caveLabel;
}
/** Places or removes a game object at a location on the given floor, recording undo. */
public void setGameObject(GameObjectData data, ObjectLocation location, int floor) {
    setGameObject(data, location, floor, true);
}
/**
 * Places a game object (data != null), removes the object at one location
 * (data == null, location != null), or clears all locations on the floor
 * (both null).
 *
 * @param undo whether to record an undo snapshot
 */
void setGameObject(GameObjectData data, ObjectLocation location, int floor, boolean undo) {
    Tile oldTile = new Tile(this);
    if (data!=null) {
        entities.put(new ObjectEntityData(floor, location), new GameObject(data));
    }
    else if (location != null) {
        entities.remove(new ObjectEntityData(floor, location));
    }
    else {
        for (ObjectLocation loc : ObjectLocation.values()) {
            entities.remove(new ObjectEntityData(floor, loc));
        }
    }
    if (undo) {
        map.addUndo(this, oldTile);
    }
}
/** Returns the game object at a location on the given floor, or null. */
public GameObject getGameObject(ObjectLocation location, int floor) {
    return (GameObject) entities.get(new ObjectEntityData(floor, location));
}
/** Places or removes an animal at a location on the given floor, recording undo. */
public void setAnimal(AnimalData data, ObjectLocation location, int floor) {
    setAnimal(data, location, floor, true);
}
/**
 * Places an animal using the globally selected age/gender, or — when data
 * is null — clears ALL object locations on the floor.
 * NOTE(review): unlike setGameObject, removal ignores the location argument
 * and wipes every slot — confirm this asymmetry is intended.
 */
void setAnimal(AnimalData data, ObjectLocation location, int floor, boolean undo) {
    Tile oldTile = new Tile(this);
    if (data!=null) {
        entities.put(new ObjectEntityData(floor, location), new Animal(data, Globals.animalAge, Globals.animalGender));
    }
    else {
        for (ObjectLocation loc : ObjectLocation.values()) {
            entities.remove(new ObjectEntityData(floor, loc));
        }
    }
    if (undo) {
        map.addUndo(this, oldTile);
    }
}
/** Returns the grid entity (object or animal) at the given level/location, or null. */
public GridTileEntity getGridEntity(int level, ObjectLocation location) {
    //assumption - ObjectEntityData key always have GameObject value.
    return (GridTileEntity) entities.get(new ObjectEntityData(level, location));
}
/** Destroys the surface bridge part on this tile, if any. */
public void destroyBridge() {
    if (bridgePart == null) {
        return;
    }
    bridgePart.destroy();
}

/**
 * This method shouldn't be called to destroy bridge manually - use destroyBridge() instead!
 */
public void setBridgePart(BridgePart bridgePart, boolean surfaced) {
    if (surfaced) {
        this.bridgePart = bridgePart;
    }
    else {
        this.caveBridgePart = bridgePart;
    }
}

/** Bridge part on whichever layer is currently being edited. */
public BridgePart getCurrentLayerBridgePart() {
    return Globals.floor < 0 ? getCaveBridgePart() : getBridgePart();
}

/** Returns the surface bridge part, or null. */
public BridgePart getBridgePart() {
    return bridgePart;
}

/** Returns the cave bridge part, or null. */
public BridgePart getCaveBridgePart() {
    return caveBridgePart;
}
/** Returns the materials needed by this tile's own entities and surface bridge. */
public Materials getMaterials() {
    return getMaterials(false, false);
}
/**
 * Returns the materials needed by this tile, optionally including the
 * walls/fences owned by the east (withRight) and north (withTop) neighbors
 * that visually belong to this tile's edges.
 * Bug fix: neighbor tiles are null at the map border — previously this NPEd;
 * missing neighbors now simply contribute nothing.
 */
public Materials getMaterials(boolean withRight, boolean withTop) {
    Materials materials = new Materials();
    entities.values().forEach(entity -> materials.put(entity.getMaterials()));
    if (bridgePart != null) {
        materials.put(bridgePart.getMaterials());
    }
    if (withRight) {
        putWallMaterials(materials, map.getTile(this, 1, 0), true);
    }
    if (withTop) {
        putWallMaterials(materials, map.getTile(this, 0, 1), false);
    }
    return materials;
}
/** Adds a neighbor's wall/fence materials on every floor; no-op for null neighbors. */
private void putWallMaterials(Materials materials, Tile neighbor, boolean vertical) {
    if (neighbor == null) {
        return; // map border - no neighbor, nothing to add
    }
    for (int i = 0; i < Constants.FLOORS_LIMIT; i++) {
        Wall wall = vertical ? neighbor.getVerticalWall(i) : neighbor.getHorizontalWall(i);
        Wall fence = vertical ? neighbor.getVerticalFence(i) : neighbor.getHorizontalFence(i);
        if (wall != null) {
            materials.put(wall.getMaterials());
        }
        if (fence != null) {
            materials.put(fence.getMaterials());
        }
    }
}
/**
 * Returns true when the given edge of this tile has no ground-floor house
 * wall blocking it. North/east edges are owned by neighbor tiles, so a
 * missing neighbor (map border) is impassable.
 */
public boolean isPassable(TileBorder border) {
    switch (border) {
        case SOUTH:
            return getHorizontalWall(0) == null;
        case NORTH: {
            Tile north = map.getTile(this, 0, 1);
            return north != null && north.getHorizontalWall(0) == null;
        }
        case WEST:
            return getVerticalWall(0) == null;
        case EAST: {
            Tile east = map.getTile(this, 1, 0);
            return east != null && east.getVerticalWall(0) == null;
        }
        default:
            return false;
    }
}

/** Human-readable position of this tile, e.g. {@code Tile: (3; 7)}. */
@Override
public String toString() {
    return "Tile: (" + x + "; " + y + ")";
}
/**
 * Returns the tiles whose rendering is affected by editing the given
 * fragment of this tile: 4 tiles for the center, 2 per edge, 1 per corner.
 * Returns null for a null fragment; array slots may contain null at the
 * map border.
 *
 * @throws DeedPlannerRuntimeException for an unrecognized fragment
 */
public Tile[] getAffectedTiles(TileFragment frag) {
    if (frag == null) {
        return null;
    }
    final Map m = getMap();
    switch (frag) {
        case CENTER:
            return new Tile[] {this, m.getTile(this, 1, 0), m.getTile(this, 1, 1), m.getTile(this, 0, 1)};
        case S:
            return new Tile[] {this, m.getTile(this, 1, 0)};
        case N:
            return new Tile[] {m.getTile(this, 0, 1), m.getTile(this, 1, 1)};
        case W:
            return new Tile[] {this, m.getTile(this, 0, 1)};
        case E:
            return new Tile[] {m.getTile(this, 1, 0), m.getTile(this, 1, 1)};
        case SW:
            return new Tile[] {this};
        case SE:
            return new Tile[] {m.getTile(this, 1, 0)};
        case NW:
            return new Tile[] {m.getTile(this, 0, 1)};
        case NE:
            return new Tile[] {m.getTile(this, 1, 1)};
        default:
            throw new DeedPlannerRuntimeException("Illegal argument");
    }
}
}
|
package pl.wurmonline.deedplanner.data;
import java.util.*;
import java.util.Map.Entry;
import javax.media.opengl.GL2;
import org.w3c.dom.*;
import pl.wurmonline.deedplanner.*;
import pl.wurmonline.deedplanner.data.storage.Data;
import pl.wurmonline.deedplanner.logic.Tab;
import pl.wurmonline.deedplanner.logic.TileFragment;
import pl.wurmonline.deedplanner.util.*;
public final class Tile implements XMLSerializable {
// Shared column-major 4x4 matrix used by deform() to shear walls along slopes.
// NOTE(review): static and mutated per call — not thread-safe; presumably only
// the render thread touches it. Confirm.
private static final float[] deformMatrix = new float[] {1, 0, 0, 0,
                                                         0, 1, 0, 0,
                                                         0, 0, 1, 0,
                                                         0, 0, 0, 1};

private final Map map;        // owning map
private final int x;          // tile X coordinate on the map
private final int y;          // tile Y coordinate on the map
private int height = 5;       // surface corner height
private Ground ground;        // terrain type
private final HashMap<EntityData, TileEntity> entities;  // walls/floors/objects keyed by (floor, type)
private Label label;          // optional surface label

private int caveHeight = 5;   // cave-layer corner height
private int caveSize = 30;    // cave ceiling size
private CaveData cave = Data.caves.get("sw");  // cave type, defaults to "sw"
private Label caveLabel;      // optional cave-layer label
/**
 * Deserializes a tile from its XML {@code <tile>} element: height
 * attributes (optional cave values keep their defaults when absent),
 * ground, cave, labels, and per-level entity children.
 */
public Tile(Map map, int x, int y, Element tile) {
    this.map = map;
    this.x = x;
    this.y = y;
    // Heights are serialized as floats but stored as ints.
    height = (int) Float.parseFloat(tile.getAttribute("height"));
    if (!tile.getAttribute("caveHeight").equals("")) {
        caveHeight = (int) Float.parseFloat(tile.getAttribute("caveHeight"));
    }
    if (!tile.getAttribute("caveSize").equals("")) {
        caveSize = (int) Float.parseFloat(tile.getAttribute("caveSize"));
    }
    ground = new Ground((Element) tile.getElementsByTagName("ground").item(0));
    if (tile.getElementsByTagName("cave").getLength()!=0) {
        cave = CaveData.get((Element) tile.getElementsByTagName("cave").item(0));
    }
    NodeList labels = tile.getElementsByTagName("label");
    if (labels.getLength()!=0) {
        label = new Label((Element) labels.item(0));
    }
    NodeList caveLabels = tile.getElementsByTagName("caveLabel");
    if (caveLabels.getLength()!=0) {
        caveLabel = new Label((Element) caveLabels.item(0));
    }
    entities = new HashMap<>();
    // Each <level> groups the entities of one floor.
    NodeList list = tile.getElementsByTagName("level");
    for (int i=0; i<list.getLength(); i++) {
        Element level = (Element) list.item(i);
        int floor = Integer.parseInt(level.getAttribute("value"));
        NodeList childNodes = level.getElementsByTagName("*");
        for (int i2=0; i2<childNodes.getLength(); i2++) {
            Element entity = (Element) childNodes.item(i2);
            // Tag names are matched case-insensitively for legacy file support.
            switch (entity.getNodeName().toLowerCase()) {
                case "floor":
                    entities.put(new EntityData(floor, EntityType.FLOORROOF), new Floor(entity));
                    break;
                case "hwall":
                    // Walls and fences share the tag; split by the houseWall flag.
                    Wall hwall = new Wall(entity);
                    if (hwall.data.houseWall) {
                        entities.put(new EntityData(floor, EntityType.HWALL), hwall);
                    }
                    else {
                        entities.put(new EntityData(floor, EntityType.HFENCE), hwall);
                    }
                    break;
                case "vwall":
                    Wall vwall = new Wall(entity);
                    if (vwall.data.houseWall) {
                        entities.put(new EntityData(floor, EntityType.VWALL), vwall);
                    }
                    else {
                        entities.put(new EntityData(floor, EntityType.VFENCE), vwall);
                    }
                    break;
                case "hborder":
                    // Borders always live on floor 0 regardless of the <level> they appear in.
                    entities.put(new EntityData(0, EntityType.HBORDER), BorderData.get(entity));
                    break;
                case "vborder":
                    entities.put(new EntityData(0, EntityType.VBORDER), BorderData.get(entity));
                    break;
                case "roof":
                    entities.put(new EntityData(floor, EntityType.FLOORROOF), new Roof(entity));
                    break;
                case "object":
                    ObjectLocation loc = ObjectLocation.parse(entity.getAttribute("position"));
                    entities.put(new ObjectEntityData(floor, loc), new GameObject(entity));
                    break;
                case "cave":
                    cave = CaveData.get(entity);
                    break;
            }
        }
    }
}
/**
 * Creates an empty tile at the given map position with default grass
 * ("gr") ground when ground data is available.
 */
public Tile(Map map, int x, int y) {
    this.map = map;
    this.x = x;
    this.y = y;
    // Data.grounds may be empty in headless/test contexts — leave ground null then.
    if (!Data.grounds.isEmpty()) {
        ground = new Ground(Data.grounds.get("gr"));
    }
    entities = new HashMap<>();
}
/** Copy constructor used for undo snapshots; same map and position. */
public Tile(Tile tile) {
    this(tile.map, tile, tile.x, tile.y);
}
/**
 * Copies a tile's full state to a (possibly different) map and position.
 * Bug fixes: the deep-copied entity map built here was previously discarded
 * in favor of a raw shallow copy, and caveHeight/caveSize/caveLabel were
 * never copied — which corrupted undo snapshots of cave edits.
 */
public Tile(Map map, Tile tile, int x, int y) {
    this.map = map;
    this.x = x;
    this.y = y;
    this.height = tile.height;
    this.caveHeight = tile.caveHeight;
    this.caveSize = tile.caveSize;
    this.ground = tile.ground;
    this.cave = tile.cave;
    this.label = tile.label;
    this.caveLabel = tile.caveLabel;
    // Deep-copy entities so later edits cannot mutate this snapshot.
    HashMap<EntityData, TileEntity> copiedEntities = new HashMap<>();
    for (Entry<EntityData, TileEntity> entrySet : tile.entities.entrySet()) {
        copiedEntities.put(entrySet.getKey(), entrySet.getValue().deepCopy());
    }
    this.entities = copiedEntities;
}
/**
 * Renders this tile in 3D. Edge tiles are skipped entirely; otherwise the
 * surface or cave layer is drawn depending on the current floor.
 */
public void render3d(GL2 g, boolean edge) {
    if (edge) {
        return;
    }
    if (Globals.floor >= 0) {
        renderWorld(g);
    }
    else {
        renderUnderground(g);
    }
}

/** Renders the surface layer: ground first, then entities on top. */
private void renderWorld(GL2 g) {
    renderGround(g);
    renderEntities(g);
}
/**
 * Renders the ground of this tile. In top-down (up-camera) mode the ground
 * is only visible from floors 0-2 and is dimmed the higher the viewed
 * floor; other camera modes always render at the current color.
 */
private void renderGround(GL2 g) {
    if (Globals.upCamera) {
        if (Globals.floor < 0 || Globals.floor >= 3) {
            return; // ground is not drawn from these floors in top-down mode
        }
        final float shade;
        switch (Globals.floor) {
            case 1:
                shade = 0.6f;
                break;
            case 2:
                shade = 0.25f;
                break;
            default: // floor 0
                shade = 1;
                break;
        }
        g.glColor3f(shade, shade, shade);
    }
    ground.render(g, this);
}
/**
 * Renders all entities (floors/roofs, walls, fences, objects) of this tile
 * in 3D. In top-down mode, entities more than two floors below the viewed
 * floor are skipped and the rest are dimmed by distance.
 */
private void renderEntities(GL2 g) {
    for (Entry<EntityData, TileEntity> e : entities.entrySet()) {
        EntityData key = e.getKey();
        final int floor = key.getFloor();
        float colorMod = 1;
        if (Globals.upCamera) {
            // Dim entities on lower floors; skip anything above the viewed
            // floor or more than 2 floors below it.
            switch (Globals.floor-floor) {
                case 0:
                    colorMod = 1;
                    break;
                case 1:
                    colorMod = 0.6f;
                    break;
                case 2:
                    colorMod = 0.25f;
                    break;
                default:
                    continue;
            }
        }
        TileEntity entity = e.getValue();
        g.glPushMatrix();
        switch (key.getType()) {
            case FLOORROOF:
                // Each floor is 3 world units tall; HEIGHT_MOD converts raw heights.
                g.glTranslatef(4, 0, 3*floor + getFloorHeight()/Constants.HEIGHT_MOD);
                g.glColor3f(colorMod, colorMod, colorMod);
                entity.render(g, this);
                break;
            case VWALL: case VFENCE:
                g.glTranslatef(0, 0, 3*floor + getVerticalWallHeight()/Constants.HEIGHT_MOD);
                g.glRotatef(90, 0, 0, 1);
                // Shear the wall to follow the terrain slope (47f: model-size constant).
                float vdiff = getVerticalWallHeightDiff()/47f;
                if (vdiff<0) {
                    g.glTranslatef(0, 0, -vdiff*4f);
                }
                deform(g, vdiff);
                Wall vwall = (Wall) entity;
                if (Globals.upCamera) {
                    vwall.data.color.use(g, colorMod);
                }
                else {
                    g.glColor3f(1, 1, 1);
                }
                vwall.render(g, this);
                g.glColor3f(1, 1, 1);
                break;
            case HWALL: case HFENCE:
                g.glTranslatef(0, 0, 3*floor + getHorizontalWallHeight()/Constants.HEIGHT_MOD);
                float hdiff = getHorizontalWallHeightDiff()/47f;
                if (hdiff<0) {
                    g.glTranslatef(0, 0, -hdiff*4f);
                }
                deform(g, hdiff);
                Wall hwall = (Wall) entity;
                if (Globals.upCamera) {
                    hwall.data.color.use(g, colorMod);
                }
                else {
                    g.glColor3f(1, 1, 1);
                }
                hwall.render(g, this);
                g.glColor3f(1, 1, 1);
                break;
            case OBJECT:
                ObjectEntityData objData = (ObjectEntityData) key;
                ObjectLocation loc = objData.getLocation();
                GameObject obj = (GameObject) entity;
                g.glColor3f(colorMod, colorMod, colorMod);
                // Position the object within the tile and interpolate terrain height there.
                g.glTranslatef(loc.getHorizontalAlign(), loc.getVerticalAlign(), 3*floor + getHeight(loc.getHorizontalAlign()/4f, loc.getVerticalAlign()/4f)/Constants.HEIGHT_MOD);
                obj.render(g, this);
                break;
        }
        g.glPopMatrix();
        // Reset color after each entity so tints never leak.
        g.glColor3f(1, 1, 1);
    }
}
/** Renders the cave layer of this tile. */
private void renderUnderground(GL2 g) {
    cave.render(g, this);
}
/**
 * Renders the 2D overlay entities of this tile (tile borders).
 * Borders are drawn only in top-down (up-camera) mode.
 */
public void render2d(GL2 g) {
    for (Entry<EntityData, TileEntity> e : entities.entrySet()) {
        EntityData key = e.getKey();
        TileEntity entity = e.getValue();
        g.glPushMatrix();
        switch (key.getType()) {
            case VBORDER:
                // Vertical borders reuse the horizontal model, rotated 90 degrees.
                g.glRotatef(90, 0, 0, 1);
                BorderData vBorder = (BorderData) entity;
                if (Globals.upCamera) {
                    vBorder.render(g, this);
                }
                g.glColor3f(1, 1, 1);
                break;
            case HBORDER:
                BorderData hBorder = (BorderData) entity;
                if (Globals.upCamera) {
                    hBorder.render(g, this);
                }
                g.glColor3f(1, 1, 1);
                break;
        }
        g.glPopMatrix();
        g.glColor3f(1, 1, 1);
    }
}
/**
 * Returns the highest corner height of this tile. Missing neighbor tiles
 * (map border) contribute 0.
 */
private float getFloorHeight() {
    float h00 = getHeight();
    float h10 = neighborHeight(1, 0);
    float h01 = neighborHeight(0, 1);
    float h11 = neighborHeight(1, 1);
    return Math.max(Math.max(h00, h10), Math.max(h01, h11));
}

/** Height of the tile at the given offset, or 0 when off-map. */
private float neighborHeight(int deltaX, int deltaY) {
    Tile neighbor = getMap().getTile(this, deltaX, deltaY);
    return neighbor != null ? neighbor.getHeight() : 0;
}

/** Lower of the two corner heights spanned by the vertical (west-edge) wall. */
private float getVerticalWallHeight() {
    return Math.min(getHeight(), getMap().getTile(this, 0, 1).getHeight());
}

/** Signed slope along the vertical wall (north corner minus this corner). */
private float getVerticalWallHeightDiff() {
    return getMap().getTile(this, 0, 1).getHeight() - getHeight();
}

/** Lower of the two corner heights spanned by the horizontal (south-edge) wall. */
private float getHorizontalWallHeight() {
    return Math.min(getHeight(), getMap().getTile(this, 1, 0).getHeight());
}

/** Signed slope along the horizontal wall (east corner minus this corner). */
private float getHorizontalWallHeightDiff() {
    return getMap().getTile(this, 1, 0).getHeight() - getHeight();
}

/**
 * Applies a Z-shear to the modelview matrix so walls follow terrain slope.
 * NOTE(review): deformMatrix is a shared static array — not thread-safe;
 * presumably only the render thread calls this. Confirm.
 */
private void deform(GL2 g, float scale) {
    deformMatrix[2] = scale;
    g.glMultMatrixf(deformMatrix, 0);
}
/**
 * Draws a pulsing yellow selection quad over this tile, but only in the
 * labels and height tabs where tile selection is meaningful.
 */
public void renderSelection(GL2 g) {
    if ((Globals.tab == Tab.labels || Globals.tab == Tab.height)) {
        g.glDisable(GL2.GL_ALPHA_TEST);
        g.glEnable(GL2.GL_BLEND);
        g.glBlendFunc(GL2.GL_SRC_ALPHA, GL2.GL_ONE_MINUS_SRC_ALPHA);
        // Triangle-wave pulse with a 2 second period: alpha in [0.1, 0.3].
        double color = System.currentTimeMillis();
        color%=2000d; color-=1000d; color = Math.abs(color); color/=1000d;
        g.glColor4d(1, 1, 0, 0.1d+0.2d*color);
        g.glBegin(GL2.GL_QUADS);
        // Tile spans 4x4 world units.
        g.glVertex2f(0, 0);
        g.glVertex2f(0, 4);
        g.glVertex2f(4, 4);
        g.glVertex2f(4, 0);
        g.glEnd();
        // Restore GL state for subsequent rendering.
        g.glColor4f(1, 1, 1, 1);
        g.glDisable(GL2.GL_BLEND);
        g.glEnable(GL2.GL_ALPHA_TEST);
    }
}
/** Renders this tile's surface label, if one is set. */
public void renderLabel(GL2 g) {
    if (label!=null) {
        label.render(g, this);
    }
}
/** Renders this tile's cave label, if one is set. */
public void renderCaveLabel(GL2 g) {
    if (caveLabel!=null) {
        caveLabel.render(g, this);
    }
}
/**
 * Serializes this tile into a {@code <tile>} element appended to {@code root}:
 * position/height attributes, ground, cave, optional labels, and one
 * {@code <level>} element per floor containing that floor's entities.
 */
public void serialize(Document doc, Element root) {
    final Element tile = doc.createElement("tile");
    tile.setAttribute("x", Integer.toString(x));
    tile.setAttribute("y", Integer.toString(y));
    tile.setAttribute("height", Float.toString(height));
    tile.setAttribute("caveHeight", Float.toString(caveHeight));
    tile.setAttribute("caveSize", Float.toString(caveSize));
    root.appendChild(tile);
    ground.serialize(doc, tile);
    cave.serialize(doc, tile);
    if (label!=null) {
        label.serialize(doc, tile, false);
    }
    if (caveLabel!=null) {
        caveLabel.serialize(doc, tile, true);
    }
    // Lazily create one <level> element per floor that actually holds entities.
    final HashMap<Integer, Element> levels = new HashMap<>();
    for (Entry<EntityData, TileEntity> e : entities.entrySet()) {
        final EntityData key = e.getKey();
        final TileEntity entity = e.getValue();
        final Element level = levels.computeIfAbsent(key.getFloor(), floor -> {
            Element created = doc.createElement("level");
            created.setAttribute("value", Integer.toString(floor));
            tile.appendChild(created);
            return created;
        });
        switch (key.getType()) {
            case FLOORROOF:
                // Floors/roofs serialize themselves directly under <level>.
                entity.serialize(doc, level);
                break;
            case HWALL: case HFENCE:
                appendSerialized(doc, level, "hWall", entity);
                break;
            case VWALL: case VFENCE:
                appendSerialized(doc, level, "vWall", entity);
                break;
            case HBORDER:
                appendSerialized(doc, level, "hBorder", entity);
                break;
            case VBORDER:
                appendSerialized(doc, level, "vBorder", entity);
                break;
            case OBJECT:
                ObjectEntityData objectData = (ObjectEntityData) key;
                Element objectElement = doc.createElement("object");
                objectElement.setAttribute("position", objectData.getLocation().toString());
                entity.serialize(doc, objectElement);
                level.appendChild(objectElement);
                break;
        }
    }
}

/** Serializes an entity inside a freshly created wrapper element under {@code level}. */
private void appendSerialized(Document doc, Element level, String tag, TileEntity entity) {
    Element wrapper = doc.createElement(tag);
    entity.serialize(doc, wrapper);
    level.appendChild(wrapper);
}
/** Returns the map this tile belongs to. */
public Map getMap() {
    return map;
}
/** Returns the ground (terrain type) of this tile. */
public Ground getGround() {
    return ground;
}
/** Sets the ground with an explicit road direction, recording undo. */
public void setGround(GroundData data, RoadDirection dir) {
    setGround(data, dir, true);
}
/** Sets the ground using the globally selected road direction, recording undo. */
public void setGround(GroundData data) {
    setGround(data, Globals.roadDirection, true);
}
/**
 * Sets the ground type of this tile.
 * Bug fix: a null {@code data} previously caused an NPE inside
 * {@code new Ground(data)}; it is now ignored.
 *
 * @param dir  road direction; forced to CENTER for non-diagonal grounds
 * @param undo whether to record an undo snapshot
 */
void setGround(GroundData data, RoadDirection dir, boolean undo) {
    if (data == null) {
        return;
    }
    // No-op when the ground would not actually change — avoids spurious undo entries.
    if (!new Ground(data).equals(ground)) {
        Tile oldTile = new Tile(this);
        if (!data.diagonal) {
            dir = RoadDirection.CENTER;
        }
        ground = new Ground(data, dir);
        if (undo) {
            map.addUndo(this, oldTile);
        }
    }
}
/** Returns this tile's X coordinate on the map. */
public int getX() {
    return x;
}

/** Returns this tile's Y coordinate on the map. */
public int getY() {
    return y;
}

/**
 * Height-tool entry point: dispatches to surface height, cave height or
 * cave size depending on the current floor and edit mode.
 */
public void setHeight(int height) {
    if (Globals.floor >= 0) {
        setHeight(height, true);
    }
    else if (!Globals.editSize) {
        setCaveHeight(height, true);
    }
    else {
        setCaveSize(height, true);
    }
}

/**
 * Sets the surface height of this tile's corner and recalculates map heights.
 *
 * @param undo whether to record an undo snapshot
 */
void setHeight(int height, boolean undo) {
    if (this.height == height) {
        return;
    }
    Tile previousState = new Tile(this);
    this.height = height;
    map.recalculateHeight();
    if (undo) {
        map.addUndo(this, previousState);
    }
}

/**
 * Bilinearly interpolates the surface height inside this tile.
 *
 * @param xPart fractional position across the tile in X, in [0, 1]
 * @param yPart fractional position across the tile in Y, in [0, 1]
 * @return interpolated height; corners beyond the map edge count as 0
 */
public float getHeight(final float xPart, final float yPart) {
    final float xRemainder = 1f - xPart;
    final float yRemainder = 1f - yPart;
    // Corner heights; edge-of-map corners default to 0.
    final float h00 = height;
    final float h10 = x!=map.getWidth() ? map.getTile(this, 1, 0).height : 0;
    final float h01 = y!=map.getHeight() ? map.getTile(this, 0, 1).height : 0;
    final float h11 = (x!=map.getWidth() && y!=map.getHeight()) ? map.getTile(this, 1, 1).height : 0;
    // Interpolate along X on both edges, then along Y between them.
    final float southEdge = (h00*xRemainder + h10*xPart);
    final float northEdge = (h01*xRemainder + h11*xPart);
    return (southEdge*yRemainder + northEdge*yPart);
}

/** Returns the surface height of this tile's corner. */
public int getHeight() {
    return height;
}
/**
 * Sets the cave-layer height of this tile's corner.
 *
 * @param undo whether to record an undo snapshot
 */
void setCaveHeight(int height, boolean undo) {
    if (this.caveHeight == height) {
        return;
    }
    Tile previousState = new Tile(this);
    this.caveHeight = height;
    map.recalculateHeight();
    if (undo) {
        map.addUndo(this, previousState);
    }
}

/** Returns the cave-layer height of this tile's corner. */
public int getCaveHeight() {
    return caveHeight;
}

/**
 * Sets the cave ceiling size, clamped to the valid [30, 300] range
 * (out-of-range values are ignored).
 */
void setCaveSize(int size, boolean undo) {
    if (size < 30 || size > 300) {
        return;
    }
    if (this.caveSize == size) {
        return;
    }
    Tile previousState = new Tile(this);
    this.caveSize = size;
    map.recalculateHeight();
    if (undo) {
        map.addUndo(this, previousState);
    }
}

/** Returns the cave ceiling size of this tile. */
public int getCaveSize() {
    return caveSize;
}

/** Sets the cave type of this tile, recording undo. */
public void setCaveEntity(CaveData entity) {
    setCaveEntity(entity, true);
}

/** Sets the cave type; no-op when unchanged (reference comparison). */
void setCaveEntity(CaveData entity, boolean undo) {
    if (this.cave == entity) {
        return;
    }
    Tile previousState = new Tile(this);
    this.cave = entity;
    if (undo) {
        map.addUndo(this, previousState);
    }
}

/** Returns the cave type of this tile. */
public CaveData getCaveEntity() {
    return cave;
}

/**
 * Returns true when all four corners of this tile share the same surface
 * height. Tiles at the map border are never flat.
 */
public boolean isFlat() {
    final int ownHeight = getHeight();
    final int[][] cornerOffsets = {{1, 1}, {1, 0}, {0, 1}};
    for (int[] offset : cornerOffsets) {
        Tile neighbor = map.getTile(this, offset[0], offset[1]);
        if (neighbor == null || neighbor.getHeight() != ownHeight) {
            return false;
        }
    }
    return true;
}
/** Sets the floor/roof entity on the given level, always recording an undo step. */
public void setTileContent(TileEntity entity, int level) {
setTileContent(entity, level, true);
}
/**
 * Places (or, for a null entity, removes) the floor/roof entity on the given
 * level. The request is ignored when the tile is not flat, or when adding a
 * roof while this tile already holds a roof on any level. Entities are
 * compared by reference; an undo step is recorded when requested and the
 * content actually changed.
 */
void setTileContent(TileEntity entity, int level, boolean undo) {
    if (!isFlat()) {
        return;
    }
    if (entity instanceof Roof) {
        // Only one roof per tile, regardless of level.
        for (TileEntity existing : entities.values()) {
            if (existing instanceof Roof) {
                return;
            }
        }
    }
    final EntityData key = new EntityData(level, EntityType.FLOORROOF);
    if (entity == entities.get(key)) {
        return;
    }
    final Tile previousState = new Tile(this);
    if (entity == null) {
        entities.remove(key);
    } else {
        entities.put(key, entity);
    }
    if (undo) {
        map.addUndo(this, previousState);
    }
}
/** @return the floor/roof entity on the given level, or null when none is set. */
public TileEntity getTileContent(int level) {
return entities.get(new EntityData(level, EntityType.FLOORROOF));
}
/** Sets the horizontal wall/fence on the given level, always recording an undo step. */
public void setHorizontalWall(WallData wall, int level) {
setHorizontalWall(wall, level, true);
}
/**
 * Sets, replaces or (for a null {@code wall}) clears the wall/fence on the
 * horizontal border of this tile at the given level.
 * <p>
 * House walls require this tile or its (0,-1) neighbour to be flat, and may
 * be auto-reversed so they face the side holding a floor/roof. A house wall
 * placed into the slot removes a fence there (and vice versa) unless an
 * arch / arch-buildable combination allows both to coexist. An undo step is
 * recorded when requested and something actually changed.
 */
void setHorizontalWall(WallData wall, int level, boolean undo) {
    final boolean houseWall = wall != null && wall.houseWall;
    final Tile neighbour = map.getTile(this, 0, -1);
    if (houseWall) {
        // House walls need a flat tile on at least one side of the border.
        if (!(isFlat() || (neighbour != null && neighbour.isFlat()))) {
            return;
        }
    }
    final EntityData entityData;
    if (houseWall) {
        entityData = new EntityData(level, EntityType.HWALL);
    } else if (wall != null) {
        entityData = new EntityData(level, EntityType.HFENCE);
    } else {
        entityData = null;
    }
    final EntityData wallEntity = new EntityData(level, EntityType.HWALL);
    final EntityData fenceEntity = new EntityData(level, EntityType.HFENCE);
    final Wall currentWall = (Wall) entities.get(wallEntity);
    final Wall currentFence = (Wall) entities.get(fenceEntity);
    boolean reversed = false;
    if (houseWall) {
        if (Globals.autoReverseWall) {
            // Reverse when only this tile has a floor/roof on the level.
            // A missing neighbour (map edge) counts as having none -
            // previously this dereferenced a possibly-null neighbour (NPE).
            reversed = getTileContent(level) != null
                    && (neighbour == null || neighbour.getTileContent(level) == null);
            if (!reversed) {
                reversed = Globals.reverseWall;
            }
        } else {
            reversed = Globals.reverseWall;
        }
    }
    if (!(new Wall(wall, reversed).equals(entities.get(entityData)))) {
        Tile oldTile = new Tile(this);
        if (wall != null) {
            entities.put(entityData, new Wall(wall, reversed));
            // A wall and a fence share the border slot: drop the other kind
            // unless an arch + arch-buildable pairing allows both.
            if (wall.houseWall && !(wall.arch && currentFence != null && currentFence.data.archBuildable)) {
                entities.remove(fenceEntity);
            } else if (!wall.houseWall && !(wall.archBuildable && currentWall != null && currentWall.data.arch)) {
                entities.remove(wallEntity);
            }
        } else {
            entities.remove(wallEntity);
            entities.remove(fenceEntity);
        }
        if (undo) {
            map.addUndo(this, oldTile);
        }
    }
}
/** @return the horizontal house wall on the given level, or null when absent. */
public Wall getHorizontalWall(int level) {
return (Wall) entities.get(new EntityData(level, EntityType.HWALL));
}
/** @return the horizontal fence on the given level, or null when absent. */
public Wall getHorizontalFence(int level) {
return (Wall) entities.get(new EntityData(level, EntityType.HFENCE));
}
/** Removes all horizontal walls and fences on every floor (no undo step is recorded). */
public void clearHorizontalWalls() {
for (int i = 0; i < Constants.FLOORS_LIMIT; i++) {
clearHorizontalWalls(i);
}
}
/** Removes the horizontal wall and fence on the given level (no undo step is recorded). */
public void clearHorizontalWalls(int level) {
entities.remove(new EntityData(level, EntityType.HWALL));
entities.remove(new EntityData(level, EntityType.HFENCE));
}
/** Sets the vertical wall/fence on the given level, always recording an undo step. */
public void setVerticalWall(WallData wall, int level) {
setVerticalWall(wall, level, true);
}
/**
 * Sets, replaces or (for a null {@code wall}) clears the wall/fence on the
 * vertical border of this tile at the given level.
 * <p>
 * Mirrors {@code setHorizontalWall}: house walls require this tile or its
 * (-1,0) neighbour to be flat, may be auto-reversed to face the side holding
 * a floor/roof, and displace a fence in the same slot (and vice versa)
 * unless an arch / arch-buildable combination allows both. An undo step is
 * recorded when requested and something actually changed.
 */
void setVerticalWall(WallData wall, int level, boolean undo) {
    final boolean houseWall = wall != null && wall.houseWall;
    final Tile neighbour = map.getTile(this, -1, 0);
    if (houseWall) {
        // Guard against a missing (-1,0) neighbour at the map edge -
        // previously this dereferenced it unconditionally (NPE), unlike the
        // horizontal variant which already checked for null.
        if (!(isFlat() || (neighbour != null && neighbour.isFlat()))) {
            return;
        }
    }
    final EntityData entityData;
    if (houseWall) {
        entityData = new EntityData(level, EntityType.VWALL);
    } else if (wall != null) {
        entityData = new EntityData(level, EntityType.VFENCE);
    } else {
        entityData = null;
    }
    final EntityData wallEntity = new EntityData(level, EntityType.VWALL);
    final EntityData fenceEntity = new EntityData(level, EntityType.VFENCE);
    final Wall currentWall = (Wall) entities.get(wallEntity);
    final Wall currentFence = (Wall) entities.get(fenceEntity);
    boolean reversed = false;
    if (houseWall) {
        if (Globals.autoReverseWall) {
            // Reverse when only the neighbour has a floor/roof on the level;
            // a missing neighbour counts as having none (NPE fix).
            reversed = getTileContent(level) == null
                    && neighbour != null && neighbour.getTileContent(level) != null;
            if (!reversed) {
                reversed = Globals.reverseWall;
            }
        } else {
            reversed = Globals.reverseWall;
        }
    }
    if (!(new Wall(wall, reversed).equals(entities.get(entityData)))) {
        Tile oldTile = new Tile(this);
        if (wall != null) {
            entities.put(entityData, new Wall(wall, reversed));
            // A wall and a fence share the border slot: drop the other kind
            // unless an arch + arch-buildable pairing allows both.
            if (wall.houseWall && !(wall.arch && currentFence != null && currentFence.data.archBuildable)) {
                entities.remove(fenceEntity);
            } else if (!wall.houseWall && !(wall.archBuildable && currentWall != null && currentWall.data.arch)) {
                entities.remove(wallEntity);
            }
        } else {
            entities.remove(wallEntity);
            entities.remove(fenceEntity);
        }
        if (undo) {
            map.addUndo(this, oldTile);
        }
    }
}
/** @return the vertical house wall on the given level, or null when absent. */
public Wall getVerticalWall(int level) {
return (Wall) entities.get(new EntityData(level, EntityType.VWALL));
}
/** @return the vertical fence on the given level, or null when absent. */
public Wall getVerticalFence(int level) {
return (Wall) entities.get(new EntityData(level, EntityType.VFENCE));
}
/** Removes all vertical walls and fences on every floor (no undo step is recorded). */
public void clearVerticalWalls() {
for (int i = 0; i < Constants.FLOORS_LIMIT; i++) {
clearVerticalWalls(i);
}
}
/** Removes the vertical wall and fence on the given level (no undo step is recorded). */
public void clearVerticalWalls(int level) {
entities.remove(new EntityData(level, EntityType.VWALL));
entities.remove(new EntityData(level, EntityType.VFENCE));
}
/** Sets the horizontal border entity, always recording an undo step. */
public void setHorizontalBorder(BorderData border) {
setHorizontalBorder(border, true);
}
/**
 * Sets or (for null) clears the horizontal border entity, compared by
 * reference. Records an undo step when requested and the value changed.
 */
void setHorizontalBorder(BorderData border, boolean undo) {
    final EntityData key = new EntityData(0, EntityType.HBORDER);
    if (border == entities.get(key)) {
        return;
    }
    final Tile previousState = new Tile(this);
    if (border == null) {
        entities.remove(key);
    } else {
        entities.put(key, border);
    }
    if (undo) {
        map.addUndo(this, previousState);
    }
}
/** @return the horizontal border entity, or null when none is set. */
public BorderData getHorizontalBorder() {
return (BorderData) entities.get(new EntityData(0, EntityType.HBORDER));
}
/** Sets the vertical border entity, always recording an undo step. */
public void setVerticalBorder(BorderData border) {
setVerticalBorder(border, true);
}
/**
 * Sets or (for null) clears the vertical border entity, compared by
 * reference. Records an undo step when requested and the value changed.
 */
void setVerticalBorder(BorderData border, boolean undo) {
    final EntityData key = new EntityData(0, EntityType.VBORDER);
    if (border == entities.get(key)) {
        return;
    }
    final Tile previousState = new Tile(this);
    if (border == null) {
        entities.remove(key);
    } else {
        entities.put(key, border);
    }
    if (undo) {
        map.addUndo(this, previousState);
    }
}
/** @return the vertical border entity, or null when none is set. */
public BorderData getVerticalBorder() {
return (BorderData) entities.get(new EntityData(0, EntityType.VBORDER));
}
/** Sets the surface label of this tile, always recording an undo step. */
public void setLabel(Label label) {
setLabel(label, true);
}
/**
 * Assigns the surface label of this tile. Note: unlike most setters here,
 * the previous state is snapshotted even when the label is unchanged.
 */
void setLabel(Label label, boolean undo) {
    final Tile previousState = new Tile(this);
    this.label = label;
    if (undo) {
        map.addUndo(this, previousState);
    }
}
/** @return the surface label of this tile, or null when none is set. */
public Label getLabel() {
return label;
}
/** Sets the cave-layer label of this tile, always recording an undo step. */
public void setCaveLabel(Label caveLabel) {
setCaveLabel(caveLabel, true);
}
/**
 * Assigns the cave-layer label of this tile. As with {@code setLabel}, the
 * previous state is snapshotted even when the label is unchanged.
 */
void setCaveLabel(Label caveLabel, boolean undo) {
    final Tile previousState = new Tile(this);
    this.caveLabel = caveLabel;
    if (undo) {
        map.addUndo(this, previousState);
    }
}
/** @return the cave-layer label of this tile, or null when none is set. */
public Label getCaveLabel() {
return caveLabel;
}
/** Sets the game object at the given location and floor, always recording an undo step. */
public void setGameObject(GameObjectData data, ObjectLocation location, int floor) {
setGameObject(data, location, floor, true);
}
/**
 * Places (for non-null data) or removes the game object at the given
 * location and floor. The previous state is snapshotted unconditionally and
 * recorded for undo when requested.
 */
void setGameObject(GameObjectData data, ObjectLocation location, int floor, boolean undo) {
    final Tile previousState = new Tile(this);
    final ObjectEntityData key = new ObjectEntityData(floor, location);
    if (data == null) {
        entities.remove(key);
    } else {
        entities.put(key, new GameObject(data));
    }
    if (undo) {
        map.addUndo(this, previousState);
    }
}
/** @return the game object at the given level and location, or null when none is set. */
public GameObject getGameObject(int level, ObjectLocation location) {
//assumption - ObjectEntityData key always have GameObject value.
return (GameObject) entities.get(new ObjectEntityData(level, location));
}
/** @return the materials needed by this tile's own entities only (no neighbour borders). */
public Materials getMaterials() {
return getMaterials(false, false);
}
/**
 * Collects the materials required by every entity on this tile, optionally
 * also counting the vertical walls/fences owned by the (+1,0) neighbour and
 * the horizontal walls/fences owned by the (0,+1) neighbour.
 * <p>
 * Neighbours outside the map are skipped - previously a missing neighbour
 * caused an NPE at the map edge.
 *
 * @param withRight include the (+1,0) neighbour's vertical walls/fences
 * @param withTop include the (0,+1) neighbour's horizontal walls/fences
 * @return the accumulated materials
 */
public Materials getMaterials(boolean withRight, boolean withTop) {
    Materials materials = new Materials();
    for (TileEntity entity : entities.values()) {
        materials.put(entity.getMaterials());
    }
    if (withRight) {
        // Hoisted out of the loop: same tile for every floor.
        final Tile right = map.getTile(this, 1, 0);
        if (right != null) {
            for (int i = 0; i < Constants.FLOORS_LIMIT; i++) {
                Wall wall = right.getVerticalWall(i);
                Wall fence = right.getVerticalFence(i);
                if (wall != null) {
                    materials.put(wall.getMaterials());
                }
                if (fence != null) {
                    materials.put(fence.getMaterials());
                }
            }
        }
    }
    if (withTop) {
        final Tile top = map.getTile(this, 0, 1);
        if (top != null) {
            for (int i = 0; i < Constants.FLOORS_LIMIT; i++) {
                Wall wall = top.getHorizontalWall(i);
                Wall fence = top.getHorizontalFence(i);
                if (wall != null) {
                    materials.put(wall.getMaterials());
                }
                if (fence != null) {
                    materials.put(fence.getMaterials());
                }
            }
        }
    }
    return materials;
}
/**
 * Tells whether the given border of this tile is free of ground-floor house
 * walls (fences are not considered). NORTH/EAST borders belong to the
 * neighbouring tile; when that neighbour does not exist the border is
 * reported as impassable.
 */
public boolean isPassable(TileBorder border) {
    if (border == TileBorder.SOUTH) {
        return getHorizontalWall(0) == null;
    }
    if (border == TileBorder.NORTH) {
        final Tile north = map.getTile(this, 0, 1);
        return north != null && north.getHorizontalWall(0) == null;
    }
    if (border == TileBorder.WEST) {
        return getVerticalWall(0) == null;
    }
    if (border == TileBorder.EAST) {
        final Tile east = map.getTile(this, 1, 0);
        return east != null && east.getVerticalWall(0) == null;
    }
    return false;
}
/** @return a short human-readable description of this tile's coordinates. */
@Override
public String toString() {
    return "Tile: (" + x + "; " + y + ")";
}
/**
 * Maps a fragment of this tile (corner, edge or center) to the tiles whose
 * data that fragment touches. A null fragment yields null; entries for
 * neighbours outside the map may themselves be null.
 *
 * @param frag fragment of this tile, or null
 * @return the affected tiles, or null when {@code frag} is null
 * @throws DeedPlannerRuntimeException for an unrecognised fragment
 */
public Tile[] getAffectedTiles(TileFragment frag) {
    if (frag == null) {
        return null;
    }
    switch (frag) {
        case CENTER:
            return new Tile[]{
                    this,
                    getMap().getTile(this, 1, 0),
                    getMap().getTile(this, 1, 1),
                    getMap().getTile(this, 0, 1)};
        case S:
            return new Tile[]{this, getMap().getTile(this, 1, 0)};
        case N:
            return new Tile[]{getMap().getTile(this, 0, 1), getMap().getTile(this, 1, 1)};
        case W:
            return new Tile[]{this, getMap().getTile(this, 0, 1)};
        case E:
            return new Tile[]{getMap().getTile(this, 1, 0), getMap().getTile(this, 1, 1)};
        case SW:
            return new Tile[]{this};
        case SE:
            return new Tile[]{getMap().getTile(this, 1, 0)};
        case NW:
            return new Tile[]{getMap().getTile(this, 0, 1)};
        case NE:
            return new Tile[]{getMap().getTile(this, 1, 1)};
        default:
            throw new DeedPlannerRuntimeException("Illegal argument");
    }
}
}
|
package org.postgresql.util;
import java.util.Hashtable;
import java.io.Serializable;
/**
 * Parses and exposes the individual fields of a PostgreSQL backend
 * ErrorResponse/NoticeResponse message. The raw message is a sequence of
 * NUL-terminated entries, each beginning with a one-character field code
 * (severity, message, detail, hint, position, ...).
 */
public class ServerErrorMessage implements Serializable
{
    // One-character field codes defined by the PostgreSQL frontend/backend protocol.
    private static final Character SEVERITY = new Character('S');
    private static final Character MESSAGE = new Character('M');
    private static final Character DETAIL = new Character('D');
    private static final Character HINT = new Character('H');
    private static final Character POSITION = new Character('P');
    private static final Character WHERE = new Character('W');
    private static final Character FILE = new Character('F');
    private static final Character LINE = new Character('L');
    private static final Character ROUTINE = new Character('R');
    private static final Character SQLSTATE = new Character('C');
    private static final Character INTERNAL_POSITION = new Character('p');
    private static final Character INTERNAL_QUERY = new Character('q');

    // Field code -> field text, populated by the constructor.
    private final Hashtable m_mesgParts = new Hashtable();
    // Logging verbosity; values above 2 make toString() emit debug-level details.
    private final int verbosity;

    /**
     * Parses the raw server error string into its component fields.
     *
     * @param p_serverError raw error text as received from the backend
     * @param verbosity logging verbosity controlling toString() detail
     */
    public ServerErrorMessage(String p_serverError, int verbosity)
    {
        this.verbosity = verbosity;
        char[] l_chars = p_serverError.toCharArray();
        int l_pos = 0;
        int l_length = l_chars.length;
        while (l_pos < l_length)
        {
            char l_mesgType = l_chars[l_pos];
            if (l_mesgType != '\0')
            {
                l_pos++;
                int l_startString = l_pos;
                // The bounds check must precede the array access: a message
                // lacking a trailing NUL previously overran the array here
                // (ArrayIndexOutOfBoundsException).
                while (l_pos < l_length && l_chars[l_pos] != '\0')
                {
                    l_pos++;
                }
                String l_mesgPart = new String(l_chars, l_startString, l_pos - l_startString);
                m_mesgParts.put(new Character(l_mesgType), l_mesgPart);
            }
            l_pos++;
        }
    }

    /** @return the SQLSTATE code, or null when the server did not send one. */
    public String getSQLState()
    {
        return (String)m_mesgParts.get(SQLSTATE);
    }

    /** @return the primary human-readable error message, or null. */
    public String getMessage()
    {
        return (String)m_mesgParts.get(MESSAGE);
    }

    /** @return the severity (e.g. ERROR, FATAL, NOTICE), or null. */
    public String getSeverity()
    {
        return (String)m_mesgParts.get(SEVERITY);
    }

    /** @return the optional secondary message with extra detail, or null. */
    public String getDetail()
    {
        return (String)m_mesgParts.get(DETAIL);
    }

    /** @return the optional suggestion on how to fix the problem, or null. */
    public String getHint()
    {
        return (String)m_mesgParts.get(HINT);
    }

    /** @return the error cursor position within the query, or 0 when absent. */
    public int getPosition()
    {
        return getIntegerPart(POSITION);
    }

    /** @return the context in which the error occurred, or null. */
    public String getWhere()
    {
        return (String)m_mesgParts.get(WHERE);
    }

    /** @return the server source file that reported the error, or null. */
    public String getFile()
    {
        return (String)m_mesgParts.get(FILE);
    }

    /** @return the server source line that reported the error, or 0 when absent. */
    public int getLine()
    {
        return getIntegerPart(LINE);
    }

    /** @return the server routine that reported the error, or null. */
    public String getRoutine()
    {
        return (String)m_mesgParts.get(ROUTINE);
    }

    /** @return the internally-generated query the error applies to, or null. */
    public String getInternalQuery()
    {
        return (String)m_mesgParts.get(INTERNAL_QUERY);
    }

    /** @return the error position within the internal query, or 0 when absent. */
    public int getInternalPosition()
    {
        return getIntegerPart(INTERNAL_POSITION);
    }

    /**
     * Parses the given field as an integer; an absent or malformed field
     * yields 0 instead of propagating NumberFormatException from server data.
     */
    private int getIntegerPart(Character c)
    {
        String s = (String)m_mesgParts.get(c);
        if (s == null)
            return 0;
        try
        {
            return Integer.parseInt(s);
        }
        catch (NumberFormatException nfe)
        {
            // Treat a non-numeric value from the server like an absent field.
            return 0;
        }
    }

    public String toString()
    {
        //Now construct the message from what the server sent
        //The general format is:
        //SEVERITY: Message \n
        //  Detail: \n
        //  Hint: \n
        //  Position: \n
        //  Where: \n
        //  Internal Query: \n
        //  Internal Position: \n
        //  Location: File:Line:Routine \n
        //  SQLState: \n
        //Normally only the message and detail is included.
        //If INFO level logging is enabled then detail, hint, position and where are
        //included.  If DEBUG level logging is enabled then all information
        //is included.
        StringBuffer l_totalMessage = new StringBuffer();
        String l_message = (String)m_mesgParts.get(SEVERITY);
        if (l_message != null)
            l_totalMessage.append(l_message).append(": ");
        l_message = (String)m_mesgParts.get(MESSAGE);
        if (l_message != null)
            l_totalMessage.append(l_message);
        l_message = (String)m_mesgParts.get(DETAIL);
        if (l_message != null)
            l_totalMessage.append("\n  ").append(GT.tr("Detail: {0}", l_message));
        l_message = (String)m_mesgParts.get(HINT);
        if (l_message != null)
            l_totalMessage.append("\n  ").append(GT.tr("Hint: {0}", l_message));
        l_message = (String)m_mesgParts.get(POSITION);
        if (l_message != null)
            l_totalMessage.append("\n  ").append(GT.tr("Position: {0}", l_message));
        l_message = (String)m_mesgParts.get(WHERE);
        if (l_message != null)
            l_totalMessage.append("\n  ").append(GT.tr("Where: {0}", l_message));
        if (verbosity > 2)
        {
            String l_internalQuery = (String)m_mesgParts.get(INTERNAL_QUERY);
            if (l_internalQuery != null)
                l_totalMessage.append("\n  ").append(GT.tr("Internal Query: {0}", l_internalQuery));
            String l_internalPosition = (String)m_mesgParts.get(INTERNAL_POSITION);
            if (l_internalPosition != null)
                l_totalMessage.append("\n  ").append(GT.tr("Internal Position: {0}", l_internalPosition));
            String l_file = (String)m_mesgParts.get(FILE);
            String l_line = (String)m_mesgParts.get(LINE);
            String l_routine = (String)m_mesgParts.get(ROUTINE);
            if (l_file != null || l_line != null || l_routine != null)
                l_totalMessage.append("\n  ").append(GT.tr("Location: File: {0}, Routine: {1}, Line: {2}", new Object[] {l_file, l_routine, l_line}));
            l_message = (String)m_mesgParts.get(SQLSTATE);
            if (l_message != null)
                l_totalMessage.append("\n  ").append(GT.tr("Server SQLState: {0}", l_message));
        }
        return l_totalMessage.toString();
    }
}
|
package net.sf.cglib;
import java.lang.reflect.*;
/**
*
* @author baliuka
* @version $Id: ConstructorProxy.java,v 1.4 2003/01/24 19:29:58 herbyderby Exp $
*/
public abstract class ConstructorProxy {
// Reflective handle on the abstract newInstance(Object[], MethodInterceptor)
// method below; used as the signature template by the bytecode generator.
private static java.lang.reflect.Method NEW_INSTANCE =
ReflectUtils.findMethod("ConstructorProxy.newInstance(Object[],MethodInterceptor)");
// Produces unique names for generated proxy classes.
private static final ClassNameFactory nameFactory =
new ClassNameFactory("ConstructorProxiedByCGLIB");
// Generated key factory for building composite keys from parameter-type arrays.
private static final ClassKey CLASS_KEY_FACTORY =
(ClassKey)KeyFactory.create(ClassKey.class, null);
// Builds a key object identifying a constructor by its parameter types.
public static Object newClassKey(Class[] args){
return CLASS_KEY_FACTORY.newInstance(args);
}
// Key-factory interface; instances are generated by KeyFactory.create above.
public interface ClassKey{
public Object newInstance(Class[] args);
}
/** Creates a new instance of ConstructorProxy. Concrete subclasses are generated at runtime. */
protected ConstructorProxy() {
}
/**
* Generates, defines and instantiates a proxy subclass whose newInstance
* method invokes the given constructor.
*
* @param constructor the constructor to proxy
* @return a ConstructorProxy instance for the constructor
* @throws Throwable if class generation or reflective instantiation fails
*/
public static Object create(Constructor constructor
)throws Throwable{
String className = nameFactory.
getNextName(constructor.getDeclaringClass());
Class gen = new Generator(className, constructor ,
constructor.getDeclaringClass().getClassLoader() ).define();
return (ConstructorProxy)gen.getConstructor(Constants.TYPES_EMPTY).
newInstance(null);
}
// Implemented by the generated subclass: invokes the proxied constructor.
public abstract Object newInstance( Object args[],MethodInterceptor interceptor ) throws Throwable;
// Emits the bytecode of the generated proxy subclass.
private static class Generator extends CodeGenerator {
private Constructor costructor; // (sic) the constructor being proxied
public Generator(String className, Constructor costructor , ClassLoader loader) {
super(className, ConstructorProxy.class, loader);
this.costructor = costructor;
}
protected void generate() {
generateNullConstructor();
begin_method(NEW_INSTANCE);
new_instance( costructor.getDeclaringClass() );
dup();
Class types[] = costructor.getParameterTypes();
// All but the last constructor parameter are unboxed from the Object[]
// argument; the interceptor (arg 1, loaded below) fills the last slot.
// NOTE(review): this assumes the proxied constructor's final parameter
// is a MethodInterceptor - confirm against the callers that generate
// such constructors.
for (int i = 0; i < types.length - 1; i++) {
load_arg(0);
push(i);
aaload();
unbox(types[i]);
}
load_arg(1);
invoke_constructor(costructor.getDeclaringClass(), types );
return_value();
end_method();
}
}
}
|
package run.bach.project;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.Files;
import run.bach.ToolOperator;
import run.bach.toolbox.TreeTool;
/**
 * Tool operator that deletes Bach's output directory tree and then tries to
 * prune the now-empty directories above it.
 */
public record CleanTool(String name) implements ToolOperator {
    /** Creates the operator under its default tool name {@code "clean"}. */
    public CleanTool() {
        this("clean");
    }

    @Override
    public void run(Operation operation) throws Exception {
        var bach = operation.bach();
        var outDirectory = bach.paths().out();
        if (Files.notExists(outDirectory)) {
            return;
        }
        bach.run(TreeTool.clean(outDirectory));
        try {
            // Prune ".bach/out/" and, when that leaves it empty, ".bach/" too.
            Files.deleteIfExists(outDirectory);
            var parentDirectory = outDirectory.getParent();
            if (parentDirectory != null) {
                Files.deleteIfExists(parentDirectory);
            }
        } catch (DirectoryNotEmptyException ignored) {
            // A non-empty directory simply stays in place.
        }
    }
}
|
package cz.hobrasoft.pdfmu.signature;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.pdf.PdfSignatureAppearance;
import com.itextpdf.text.pdf.PdfStamper;
import com.itextpdf.text.pdf.security.BouncyCastleDigest;
import com.itextpdf.text.pdf.security.CrlClient;
import com.itextpdf.text.pdf.security.DigestAlgorithms;
import com.itextpdf.text.pdf.security.ExternalDigest;
import com.itextpdf.text.pdf.security.ExternalSignature;
import com.itextpdf.text.pdf.security.MakeSignature;
import com.itextpdf.text.pdf.security.OcspClient;
import com.itextpdf.text.pdf.security.PrivateKeySignature;
import com.itextpdf.text.pdf.security.TSAClient;
import cz.hobrasoft.pdfmu.InOutPdfArgs;
import cz.hobrasoft.pdfmu.Operation;
import cz.hobrasoft.pdfmu.OperationException;
import java.io.IOException;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.Provider;
import java.security.Security;
import java.security.cert.Certificate;
import java.util.Collection;
import java.util.logging.Logger;
import net.sourceforge.argparse4j.inf.Namespace;
import net.sourceforge.argparse4j.inf.Subparser;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
/**
* Adds a digital signature to a PDF document
*
* @author <a href="mailto:filip.bartek@hobrasoft.cz">Filip Bartek</a>
*/
public class OperationSignatureAdd implements Operation {
private static final Logger logger = Logger.getLogger(OperationSignatureAdd.class.getName());
/** @return the CLI subcommand name under which this operation is registered. */
@Override
public String getCommandName() {
return "add";
}
// Input/output PDF path arguments shared by the parser and execute().
private final InOutPdfArgs inout = new InOutPdfArgs();
/**
* Configures the argparse4j subparser for the "add" subcommand with the
* in/out PDF arguments and the signature parameters.
*/
@Override
public Subparser configureSubparser(Subparser subparser) {
String help = "Digitally sign a PDF document";
// Configure the subparser
subparser.help(help)
.description(help)
.defaultHelp(true)
.setDefault("command", OperationSignatureAdd.class);
inout.addArguments(subparser);
signatureParameters.addArguments(subparser);
return subparser;
}
// digitalsignatures20130304.pdf : Code sample 1.6
// Initialize the security provider
private static final BouncyCastleProvider provider = new BouncyCastleProvider();
static {
// We need to register the provider because it needs to be accessible by its name globally.
// {@link com.itextpdf.text.pdf.security.PrivateKeySignature#PrivateKeySignature(PrivateKey pk, String hashAlgorithm, String provider)}
// uses the provider name.
Security.addProvider(provider);
}
// Initialize the digest algorithm
private static final ExternalDigest externalDigest = new BouncyCastleDigest();
// `signatureParameters` is a member variable
// so that we can add the arguments to the parser in `configureSubparser`.
// We need an instance of {@link SignatureParameters} in `configureSubparser`
// because the interface `ArgsConfiguration` does not allow static methods.
private final SignatureParameters signatureParameters = new SignatureParameters();
/**
* Entry point: reads the parsed CLI arguments and signs the document.
*
* @throws OperationException when any stage of the signing process fails
*/
@Override
public void execute(Namespace namespace) throws OperationException {
inout.setFromNamespace(namespace);
// Initialize signature parameters
signatureParameters.setFromNamespace(namespace);
sign(inout, signatureParameters);
}
// Opens the input PDF for signing, delegates, then closes the output.
private static void sign(InOutPdfArgs inout, SignatureParameters signatureParameters) throws OperationException {
inout.openSignature();
PdfStamper stp = inout.getPdfStamper();
sign(stp, signatureParameters);
inout.close();
}
// Initialize the signature appearance
// (unwraps the parameter bundle and builds the appearance from the stamper).
private static void sign(PdfStamper stp,
SignatureParameters signatureParameters) throws OperationException {
assert signatureParameters != null;
// Unwrap the signature parameters
SignatureAppearanceParameters signatureAppearanceParameters = signatureParameters.appearance;
KeystoreParameters keystoreParameters = signatureParameters.keystore;
KeyParameters keyParameters = signatureParameters.key;
String digestAlgorithm = signatureParameters.digestAlgorithm;
MakeSignature.CryptoStandard sigtype = signatureParameters.sigtype;
// Initialize the signature appearance
PdfSignatureAppearance sap = signatureAppearanceParameters.getSignatureAppearance(stp);
assert sap != null; // `stp` must have been created using `PdfStamper.createSignature` static method
sign(sap, keystoreParameters, keyParameters, digestAlgorithm, sigtype);
}
// Initialize and load the keystore
private static void sign(PdfSignatureAppearance sap,
KeystoreParameters keystoreParameters,
KeyParameters keyParameters,
String digestAlgorithm,
MakeSignature.CryptoStandard sigtype) throws OperationException {
assert keystoreParameters != null;
// Initialize and load keystore
KeyStore ks = keystoreParameters.loadKeystore();
sign(sap, ks, keyParameters, digestAlgorithm, sigtype);
}
// Get the private key and the certificate chain from the keystore
private static void sign(PdfSignatureAppearance sap,
KeyStore ks,
KeyParameters keyParameters,
String digestAlgorithm,
MakeSignature.CryptoStandard sigtype) throws OperationException {
assert keyParameters != null;
// Fix the values, especially if they were not set at all
keyParameters.fix(ks);
PrivateKey pk = keyParameters.getPrivateKey(ks);
Certificate[] chain = keyParameters.getCertificateChain(ks);
Provider signatureProvider;
{ // ksProvider
Provider ksProvider = ks.getProvider();
// "SunMSCAPI" provider must be used for signing if it was used for keystore loading.
// In case of other keystore providers,
// we use the default signature provider.
if ("SunMSCAPI".equals(ksProvider.getName())) {
signatureProvider = ksProvider;
} else {
signatureProvider = provider;
}
}
sign(sap, pk, digestAlgorithm, chain, sigtype, signatureProvider);
}
// Initialize the signature algorithm
// (validates the digest algorithm name before building the signature).
private static void sign(PdfSignatureAppearance sap,
PrivateKey pk,
String digestAlgorithm,
Certificate[] chain,
MakeSignature.CryptoStandard sigtype,
Provider signatureProvider) throws OperationException {
assert digestAlgorithm != null;
// Initialize the signature algorithm
logger.info(String.format("Digest algorithm: %s", digestAlgorithm));
if (DigestAlgorithms.getAllowedDigests(digestAlgorithm) == null) {
throw new OperationException(String.format("The digest algorithm %s is not supported.", digestAlgorithm));
}
logger.info(String.format("Signature security provider: %s", signatureProvider.getName()));
ExternalSignature externalSignature = new PrivateKeySignature(pk, digestAlgorithm, signatureProvider.getName());
sign(sap, externalSignature, chain, sigtype);
}
// Set the "external digest" algorithm
private static void sign(PdfSignatureAppearance sap,
ExternalSignature externalSignature,
Certificate[] chain,
MakeSignature.CryptoStandard sigtype) throws OperationException {
// Use the static BouncyCastleDigest instance
sign(sap, OperationSignatureAdd.externalDigest, externalSignature, chain, sigtype);
}
// Sign the document
// (final stage: delegates to iText's MakeSignature.signDetached).
private static void sign(PdfSignatureAppearance sap,
ExternalDigest externalDigest,
ExternalSignature externalSignature,
Certificate[] chain,
MakeSignature.CryptoStandard sigtype) throws OperationException {
// TODO?: Set some of the following parameters more sensibly
// Certificate Revocation List
// digitalsignatures20130304.pdf : Section 3.2
Collection<CrlClient> crlList = null;
// Online Certificate Status Protocol
// digitalsignatures20130304.pdf : Section 3.2.4
OcspClient ocspClient = null;
// Time Stamp Authority
// digitalsignatures20130304.pdf : Section 3.3
TSAClient tsaClient = null;
// digitalsignatures20130304.pdf : Section 3.5
// The value of 0 means "try a generous educated guess".
// We need not change this unless we want to optimize the resulting PDF document size.
int estimatedSize = 0;
logger.info(String.format("Cryptographic standard (signature format): %s", sigtype));
try {
MakeSignature.signDetached(sap, externalDigest, externalSignature, chain, crlList, ocspClient, tsaClient, estimatedSize, sigtype);
} catch (IOException | DocumentException | GeneralSecurityException ex) {
throw new OperationException("Could not sign the document.", ex);
} catch (NullPointerException ex) {
throw new OperationException("Could not sign the document. Invalid digest algorithm?", ex);
}
logger.info("Document successfully signed.");
}
}
|
package soot.javaToJimple;
import java.util.*;
public class JimpleBodyBuilder {
// State used while translating a single method's polyglot AST into Jimple.
soot.jimple.JimpleBody body; // body of the method being created
ArrayList exceptionTable; // list of exceptions
Stack endControlNoop = new Stack(); // for break
Stack condControlNoop = new Stack(); // continue
Stack monitorStack; // for synchronized blocks
Stack tryStack; // for try stmts in case of returns
Stack catchStack; // for catch stmts in case of returns
HashMap labelBreakMap; // for break label --> nop to jump to
HashMap labelContinueMap; // for continue label --> nop to jump to
HashMap localsMap = new HashMap(); // localInst --> soot local
HashMap getThisMap = new HashMap(); // type --> local to ret
soot.Local specialThisLocal; // "this" local; assigned for non-static methods
soot.Local outerClassParamLocal; // outer class this
private int paramRefCount = 0; // counter for param ref stmts
LocalGenerator lg; // for generated locals not in orig src
/**
 * Creates the Jimple body for the given method from its polyglot AST.
 * Emits identity statements for "this", the synthetic outer-class parameter,
 * declared formals and captured finals, then translates the statement block,
 * handles &lt;clinit&gt; initialisation, appends a return-void for void
 * methods without an explicit return, and installs the collected traps.
 *
 * @param block method body AST; may be null
 * @param formals list of polyglot.ast.Formal parameters; may be null
 * @param sootMethod the method whose body is being generated
 * @return the populated Jimple body
 */
public soot.jimple.JimpleBody createJimpleBody(polyglot.ast.Block block, List formals, soot.SootMethod sootMethod){
createBody(sootMethod);
lg = new LocalGenerator(body);
// create this formal except for static methods
if (!soot.Modifier.isStatic(sootMethod.getModifiers())) {
soot.RefType type = sootMethod.getDeclaringClass().getType();
specialThisLocal = soot.jimple.Jimple.v().newLocal("this", type);
body.getLocals().add(specialThisLocal);
soot.jimple.ThisRef thisRef = soot.jimple.Jimple.v().newThisRef(type);
soot.jimple.Stmt thisStmt = soot.jimple.Jimple.v().newIdentityStmt(specialThisLocal, thisRef);
body.getUnits().add(thisStmt);
// this is causing problems - no this in java code -> no tags
//Util.addLineTag(thisStmt, block);
}
int formalsCounter = 0;
//create outer class this param ref for inner classes except for static inner classes - this is not needed
int outerIndex = sootMethod.getDeclaringClass().getName().lastIndexOf("$");
int classMod = sootMethod.getDeclaringClass().getModifiers(); // note: not used below
if ((outerIndex != -1) && (sootMethod.getName().equals("<init>")) && sootMethod.getDeclaringClass().declaresFieldByName("this$0")){
// we know its an inner non static class can get outer class
// from field ref of the this$0 field
soot.SootClass outerClass = ((soot.RefType)sootMethod.getDeclaringClass().getFieldByName("this$0").getType()).getSootClass();
soot.Local outerLocal = lg.generateLocal(outerClass.getType());
soot.jimple.ParameterRef paramRef = soot.jimple.Jimple.v().newParameterRef(outerClass.getType(), formalsCounter);
paramRefCount++;
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(outerLocal, paramRef);
body.getUnits().add(stmt);
((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).setOuterClassThisInit(outerLocal);
outerClassParamLocal = outerLocal;
formalsCounter++;
}
// handle formals
if (formals != null) {
ArrayList formalNames = new ArrayList();
Iterator formalsIt = formals.iterator();
while (formalsIt.hasNext()) {
polyglot.ast.Formal formal = (polyglot.ast.Formal)formalsIt.next();
createFormal(formal, formalsCounter);
formalNames.add(formal.name());
formalsCounter++;
}
body.getMethod().addTag(new soot.tagkit.ParamNamesTag(formalNames));
}
// handle final local params
ArrayList finalsList = ((PolyglotMethodSource)body.getMethod().getSource()).getFinalsList();
if (finalsList != null){
Iterator finalsIt = finalsList.iterator();
while (finalsIt.hasNext()){
soot.SootField sf = (soot.SootField)finalsIt.next();
soot.jimple.ParameterRef paramRef = soot.jimple.Jimple.v().newParameterRef(sf.getType(), formalsCounter);
paramRefCount++;
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(lg.generateLocal(sf.getType()), paramRef);
body.getUnits().add(stmt);
formalsCounter++;
}
}
createBlock(block);
// if method is <clinit> handle static field inits
if (sootMethod.getName().equals("<clinit>")){
handleAssert(sootMethod);
handleStaticFieldInits(sootMethod);
handleStaticInitializerBlocks(sootMethod);
}
// determine if body has a return stmt
// (only top-level statements of the block are inspected here)
boolean hasReturn = false;
if (block != null) {
Iterator it = block.statements().iterator();
while (it.hasNext()){
Object next = it.next();
if (next instanceof polyglot.ast.Return){
hasReturn = true;
}
}
}
soot.Type retType = body.getMethod().getReturnType();
// only do this if noexplicit return
if ((!hasReturn) && (retType instanceof soot.VoidType)) {
soot.jimple.Stmt retStmt = soot.jimple.Jimple.v().newReturnVoidStmt();
body.getUnits().add(retStmt);
}
// add exceptions from exceptionTable
if (exceptionTable != null) {
Iterator trapsIt = exceptionTable.iterator();
while (trapsIt.hasNext()){
body.getTraps().add((soot.Trap)trapsIt.next());
}
}
return body;
}
/** Emits assertion-initialisation code into the body when the method source uses asserts. */
private void handleAssert(soot.SootMethod sootMethod){
    soot.javaToJimple.PolyglotMethodSource source =
            (soot.javaToJimple.PolyglotMethodSource) sootMethod.getSource();
    if (!source.hasAssert()) {
        return;
    }
    source.addAssertInits(body);
}
/**
 * Appends instance field initialisers to the body, when the method source
 * carries any.
 */
private void handleFieldInits(soot.SootMethod sootMethod) {
    soot.javaToJimple.PolyglotMethodSource source =
            (soot.javaToJimple.PolyglotMethodSource) sootMethod.getSource();
    ArrayList inits = source.getFieldInits();
    if (inits != null) {
        handleFieldInits(inits);
    }
}
/**
 * Emits an assignment into the body for each instance field initialiser:
 * this.<field> = <init expression>. Array initialisers are materialised into
 * a local first; conditional expressions are lowered to a local as well,
 * since a field assignment's right side must be a local or a constant.
 */
protected void handleFieldInits(ArrayList fieldInits){
Iterator fieldInitsIt = fieldInits.iterator();
while (fieldInitsIt.hasNext()) {
polyglot.ast.FieldDecl field = (polyglot.ast.FieldDecl)fieldInitsIt.next();
String fieldName = field.name();
polyglot.ast.Expr initExpr = field.init();
soot.SootClass currentClass = body.getMethod().getDeclaringClass();
soot.SootField sootField = currentClass.getField(fieldName, Util.getSootType(field.type().type()));
soot.Local base = specialThisLocal;
soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(base, sootField);
soot.Value sootExpr;
if (initExpr instanceof polyglot.ast.ArrayInit) {
sootExpr = getArrayInitLocal((polyglot.ast.ArrayInit)initExpr, field.type().type());
}
else {
sootExpr = createExpr(initExpr);
}
if (sootExpr instanceof soot.jimple.ConditionExpr) {
sootExpr = handleCondBinExpr((soot.jimple.ConditionExpr)sootExpr);
}
soot.jimple.AssignStmt assign;
if (sootExpr instanceof soot.Local){
assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, (soot.Local)sootExpr);
}
else if (sootExpr instanceof soot.jimple.Constant){
assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, (soot.jimple.Constant)sootExpr);
}
else {
throw new RuntimeException("fields must assign to local or constant only");
}
body.getUnits().add(assign);
// Tag the statement with the source position of the initialiser.
Util.addLnPosTags(assign, initExpr.position());
Util.addLnPosTags(assign.getRightOpBox(), initExpr.position());
}
}
/**
 * Stores the outer-class "this" parameter into the synthetic this$0 field.
 * Does nothing when the declaring class has no this$0 field (static inner
 * classes are different).
 */
private void handleOuterClassThisInit(soot.SootMethod sootMethod) {
    soot.SootClass declaringClass = body.getMethod().getDeclaringClass();
    if (!declaringClass.declaresFieldByName("this$0")) {
        return;
    }
    soot.jimple.FieldRef outerFieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(
            specialThisLocal, declaringClass.getFieldByName("this$0"));
    soot.jimple.AssignStmt assign =
            soot.jimple.Jimple.v().newAssignStmt(outerFieldRef, outerClassParamLocal);
    body.getUnits().add(assign);
}
/**
* adds any needed static field inits
*/
/**
 * Emits a static-field assignment for each static field initializer
 * collected on the method source (used when building clinit bodies).
 * A null list means there are no static initializers to emit.
 */
private void handleStaticFieldInits(soot.SootMethod sootMethod) {
ArrayList staticFieldInits = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getStaticFieldInits();
if (staticFieldInits != null) {
Iterator staticFieldInitsIt = staticFieldInits.iterator();
while (staticFieldInitsIt.hasNext()) {
polyglot.ast.FieldDecl field = (polyglot.ast.FieldDecl)staticFieldInitsIt.next();
String fieldName = field.name();
polyglot.ast.Expr initExpr = field.init();
soot.SootClass currentClass = body.getMethod().getDeclaringClass();
// resolve the SootField by name and declared type
soot.SootField sootField = currentClass.getField(fieldName, Util.getSootType(field.type().type()));
soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(sootField);
soot.Value sootExpr;
// array initializers need their own translation into a temp local
if (initExpr instanceof polyglot.ast.ArrayInit) {
sootExpr = getArrayInitLocal((polyglot.ast.ArrayInit)initExpr, field.type().type());
}
else {
sootExpr = createExpr(initExpr);
}
soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, sootExpr);
body.getUnits().add(assign);
// tag with the initializer's source position for debugging support
Util.addLnPosTags(assign, initExpr.position());
}
}
}
/**
* init blocks get created within init methods in Jimple
*/
/**
 * Instance initializer blocks get inlined into init methods in Jimple;
 * a null list means there are none to emit.
 */
private void handleInitializerBlocks(soot.SootMethod sootMethod) {
    ArrayList initializerBlocks = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getInitializerBlocks();
    if (initializerBlocks == null) return;
    handleStaticBlocks(initializerBlocks);
}
/**
 * Translates each initializer block into the current body, in order.
 */
protected void handleStaticBlocks(ArrayList initializerBlocks){
    for (Iterator blockIt = initializerBlocks.iterator(); blockIt.hasNext(); ) {
        createBlock((polyglot.ast.Block)blockIt.next());
    }
}
/**
* static init blocks get created in clinit methods in Jimple
*/
/**
 * Static initializer blocks get inlined into the clinit method in Jimple;
 * a null list means there are none to emit.
 */
private void handleStaticInitializerBlocks(soot.SootMethod sootMethod) {
    ArrayList staticInitializerBlocks = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getStaticInitializerBlocks();
    if (staticInitializerBlocks == null) return;
    for (Iterator blockIt = staticInitializerBlocks.iterator(); blockIt.hasNext(); ) {
        createBlock((polyglot.ast.Block)blockIt.next());
    }
}
/**
* create body and make it be active
*/
/**
 * Creates a fresh Jimple body for the method and installs it as the
 * active body; all subsequent create* methods append to this body field.
 */
private void createBody(soot.SootMethod sootMethod) {
body = soot.jimple.Jimple.v().newBody(sootMethod);
sootMethod.setActiveBody(body);
}
/**
* Block creation
*/
/**
 * Translates every statement of a polyglot block into the current body.
 * Null blocks are silently ignored; any non-Stmt child is an error.
 */
private void createBlock(polyglot.ast.Block block){
    if (block == null) return;
    for (Iterator stmtIt = block.statements().iterator(); stmtIt.hasNext(); ) {
        Object child = stmtIt.next();
        if (!(child instanceof polyglot.ast.Stmt)) {
            throw new RuntimeException("Unexpected - Unhandled Node");
        }
        createStmt((polyglot.ast.Stmt)child);
    }
}
/**
* Catch Formal creation - method parameters
*/
/**
 * Creates the local for a catch-clause formal and emits the identity stmt
 * binding it to the caught exception reference.
 *
 * @param formal the catch-clause parameter from the polyglot AST
 * @return the Jimple local now holding the caught exception
 */
private soot.Local createCatchFormal(polyglot.ast.Formal formal){
    // note: the formal's type is resolved inside createLocal via its LocalInstance;
    // the previously computed (and unused) sootType local has been removed
    soot.Local formalLocal = createLocal(formal.localInstance());
    soot.jimple.CaughtExceptionRef exceptRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
    soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(formalLocal, exceptRef);
    body.getUnits().add(stmt);
    // tag with the formal's source position for debugging support
    Util.addLnPosTags(stmt, formal.position());
    Util.addLnPosTags(((soot.jimple.IdentityStmt) stmt).getRightOpBox(), formal.position());
    // record the source-level parameter name on the identity stmt
    ArrayList names = new ArrayList();
    names.add(formal.name());
    stmt.addTag(new soot.tagkit.ParamNamesTag(names));
    return formalLocal;
}
/**
* Formal creation - method parameters
*/
/**
 * Creates the local for a method parameter and emits the identity stmt
 * binding it to the parameter reference at the given position.
 *
 * @param formal  the method parameter from the polyglot AST
 * @param counter zero-based position of the parameter
 */
private void createFormal(polyglot.ast.Formal formal, int counter){
    soot.Type paramType = Util.getSootType(formal.type().type());
    soot.Local paramLocal = createLocal(formal.localInstance());
    soot.jimple.ParameterRef paramRef = soot.jimple.Jimple.v().newParameterRef(paramType, counter);
    paramRefCount++;
    soot.jimple.IdentityStmt identity = soot.jimple.Jimple.v().newIdentityStmt(paramLocal, paramRef);
    body.getUnits().add(identity);
    // tag with the formal's source position for debugging support
    Util.addLnPosTags(identity, formal.position());
    Util.addLnPosTags(identity.getRightOpBox(), formal.position());
}
/**
* Literal Creation
*/
/**
 * Translates a polyglot literal into the corresponding Jimple constant.
 * char and boolean literals become IntConstants (their Jimple encoding);
 * class literals are handled via the special class-literal local helper.
 *
 * @throws RuntimeException for an unrecognized literal kind
 */
private soot.Value createLiteral(polyglot.ast.Lit lit) {
    if (lit instanceof polyglot.ast.IntLit) {
        polyglot.ast.IntLit intLit = (polyglot.ast.IntLit)lit;
        long litValue = intLit.value();
        if (intLit.kind() == polyglot.ast.IntLit.INT) {
            return soot.jimple.IntConstant.v((int)litValue);
        }
        else {
            return soot.jimple.LongConstant.v(litValue);
        }
    }
    else if (lit instanceof polyglot.ast.StringLit) {
        String litValue = ((polyglot.ast.StringLit)lit).value();
        return soot.jimple.StringConstant.v(litValue);
    }
    else if (lit instanceof polyglot.ast.NullLit) {
        return soot.jimple.NullConstant.v();
    }
    else if (lit instanceof polyglot.ast.FloatLit) {
        polyglot.ast.FloatLit floatLit = (polyglot.ast.FloatLit)lit;
        // read the value once and reuse it (the original re-queried the AST node)
        double litValue = floatLit.value();
        if (floatLit.kind() == polyglot.ast.FloatLit.DOUBLE) {
            return soot.jimple.DoubleConstant.v(litValue);
        }
        else {
            return soot.jimple.FloatConstant.v((float)litValue);
        }
    }
    else if (lit instanceof polyglot.ast.CharLit) {
        // char literals are represented as int constants in Jimple
        char litValue = ((polyglot.ast.CharLit)lit).value();
        return soot.jimple.IntConstant.v(litValue);
    }
    else if (lit instanceof polyglot.ast.BooleanLit) {
        // booleans are represented as 0/1 int constants in Jimple
        boolean litValue = ((polyglot.ast.BooleanLit)lit).value();
        if (litValue) return soot.jimple.IntConstant.v(1);
        else return soot.jimple.IntConstant.v(0);
    }
    else if (lit instanceof polyglot.ast.ClassLit){
        return getSpecialClassLitLocal((polyglot.ast.ClassLit)lit);
    }
    else {
        throw new RuntimeException("Unknown Literal - Unhandled: "+lit.getClass());
    }
}
/**
* Local Creation
*/
// this should be used for polyglot locals and formals
private soot.Local createLocal(polyglot.types.LocalInstance localInst) {
soot.Type sootType = Util.getSootType(localInst.type());
String name = localInst.name();
soot.Local sootLocal = createLocal(name, sootType);
localsMap.put(new polyglot.util.IdentityKey(localInst), sootLocal);
return sootLocal;
}
// this should be used for generated locals only
// this should be used for generated locals only
/**
 * Creates a Jimple local with the given name/type and registers it
 * with the current body.
 */
private soot.Local createLocal(String name, soot.Type sootType) {
    soot.Local generated = soot.jimple.Jimple.v().newLocal(name, sootType);
    body.getLocals().add(generated);
    return generated;
}
/**
 * Local Retrieval
 */
/**
 * Resolves a polyglot local reference; delegates to the
 * LocalInstance-based lookup.
 */
private soot.Local getLocal(polyglot.ast.Local local) {
return getLocal(local.localInstance());
}
/**
 * Local Retrieval
 */
/**
 * Resolves a LocalInstance to a Jimple local. Three cases:
 *  1. already mapped in localsMap -> return the recorded local;
 *  2. captured in this class as a synthetic val$name field -> load it
 *     through specialThisLocal into a fresh local;
 *  3. captured in some enclosing class -> walk the this$0 chain until the
 *     val$name field is found, generate a static accessor on that class,
 *     and return the local holding the accessor's result.
 */
private soot.Local getLocal(polyglot.types.LocalInstance li) {
if (localsMap.containsKey(new polyglot.util.IdentityKey(li))){
soot.Local sootLocal = (soot.Local)localsMap.get(new polyglot.util.IdentityKey(li));
return sootLocal;
}
else if (body.getMethod().getDeclaringClass().declaresField("val$"+li.name(), Util.getSootType(li.type()))){
// the local was captured by this (anonymous/local) class as a val$ field
soot.Local fieldLocal = generateLocal(li.type());
soot.SootField field = body.getMethod().getDeclaringClass().getField("val$"+li.name(), Util.getSootType(li.type()));
soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(specialThisLocal, field);
soot.jimple.AssignStmt assign = soot.jimple.Jimple.v().newAssignStmt(fieldLocal, fieldRef);
body.getUnits().add(assign);
return fieldLocal;
}
else {
//else create access meth in outer for val$fieldname
// get the this$0 field to find the type of an outer class - has
// to have one because local/anon inner can't declare static
// members so for deep nesting not in static context for these
// cases
soot.SootClass currentClass = body.getMethod().getDeclaringClass();
boolean fieldFound = false;
while (!fieldFound){
if (!currentClass.declaresFieldByName("this$0")){
throw new RuntimeException("Trying to get field val$"+li.name()+" from some outer class but can't access the outer class of: "+currentClass.getName()+"!");
}
soot.SootClass outerClass = ((soot.RefType)currentClass.getFieldByName("this$0").getType()).getSootClass();
// look for field of type li.type and name val$li.name in outer
// class
if (outerClass.declaresField("val$"+li.name(), Util.getSootType(li.type()))){
fieldFound = true;
}
currentClass = outerClass;
// repeat until found in some outer class
}
// create and add accessor to that outer class (indic as current)
soot.SootMethod methToInvoke = makeLiFieldAccessMethod(currentClass, li);
// invoke and return
// generate a local that corresponds to the invoke of that meth
ArrayList methParams = new ArrayList();
methParams.add(getThis(currentClass.getType()));
soot.Local res = Util.getPrivateAccessFieldInvoke(methToInvoke, methParams, body, lg);
return res;
}
}
/**
 * Creates a synthetic static access$NNN00 method on the given class that
 * returns its val$name captured-local field; used by getLocal when the
 * captured local lives in an enclosing class.
 *
 * @param classToInvoke the (outer) class declaring the val$ field
 * @param li the captured local instance whose field is accessed
 * @return the freshly added accessor method
 */
private soot.SootMethod makeLiFieldAccessMethod(soot.SootClass classToInvoke, polyglot.types.LocalInstance li){
// global counter keeps accessor names unique across the compilation
String name = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
ArrayList paramTypes = new ArrayList();
// single parameter: the instance whose field is read
paramTypes.add(classToInvoke.getType());
soot.SootMethod meth = new soot.SootMethod(name, paramTypes, Util.getSootType(li.type()), soot.Modifier.STATIC);
classToInvoke.addMethod(meth);
// the body is generated by a dedicated method source
PrivateFieldAccMethodSource src = new PrivateFieldAccMethodSource();
src.fieldName("val$"+li.name());
src.fieldType(Util.getSootType(li.type()));
src.classToInvoke(classToInvoke);
meth.setActiveBody(src.getBody(meth, null));
return meth;
}
/**
* Stmt creation
*/
/**
 * Statement dispatch: translates one polyglot statement into Jimple by
 * delegating to the matching create* method. Empty statements emit nothing.
 *
 * @throws RuntimeException (with the offending class name) for an
 *         unrecognized statement kind
 */
private void createStmt(polyglot.ast.Stmt stmt) {
    if (stmt instanceof polyglot.ast.Eval) {
        // expression statement: translate for side effects, discard the value
        createExpr(((polyglot.ast.Eval)stmt).expr());
    }
    else if (stmt instanceof polyglot.ast.If) {
        createIf((polyglot.ast.If)stmt);
    }
    else if (stmt instanceof polyglot.ast.LocalDecl) {
        createLocalDecl((polyglot.ast.LocalDecl)stmt);
    }
    else if (stmt instanceof polyglot.ast.Block) {
        createBlock((polyglot.ast.Block)stmt);
    }
    else if (stmt instanceof polyglot.ast.While) {
        createWhile((polyglot.ast.While)stmt);
    }
    else if (stmt instanceof polyglot.ast.Do) {
        createDo((polyglot.ast.Do)stmt);
    }
    else if (stmt instanceof polyglot.ast.For) {
        createForLoop((polyglot.ast.For)stmt);
    }
    else if (stmt instanceof polyglot.ast.Switch) {
        createSwitch((polyglot.ast.Switch)stmt);
    }
    else if (stmt instanceof polyglot.ast.Return) {
        createReturn((polyglot.ast.Return)stmt);
    }
    else if (stmt instanceof polyglot.ast.Branch) {
        createBranch((polyglot.ast.Branch)stmt);
    }
    else if (stmt instanceof polyglot.ast.ConstructorCall) {
        createConstructorCall((polyglot.ast.ConstructorCall)stmt);
    }
    else if (stmt instanceof polyglot.ast.Empty) {
        // do nothing - empty stmt
    }
    else if (stmt instanceof polyglot.ast.Throw) {
        createThrow((polyglot.ast.Throw)stmt);
    }
    else if (stmt instanceof polyglot.ast.Try) {
        createTry((polyglot.ast.Try)stmt);
    }
    else if (stmt instanceof polyglot.ast.Labeled) {
        createLabeled((polyglot.ast.Labeled)stmt);
    }
    else if (stmt instanceof polyglot.ast.Synchronized) {
        createSynchronized((polyglot.ast.Synchronized)stmt);
    }
    else if (stmt instanceof polyglot.ast.Assert) {
        createAssert((polyglot.ast.Assert)stmt);
    }
    else if (stmt instanceof polyglot.ast.LocalClassDecl) {
        createLocalClassDecl((polyglot.ast.LocalClassDecl)stmt);
    }
    else {
        // fold the class name into the exception instead of printing to stdout
        throw new RuntimeException("Unhandled Stmt: "+stmt.getClass().toString());
    }
}
/**
 * Decides whether a Jimple if-stmt is required for a condition value.
 * The only case where it is not is the constant 1 (an always-true
 * condition), for which callers emit a plain goto instead.
 */
private boolean needSootIf(soot.Value sootCond){
    boolean alwaysTrue = (sootCond instanceof soot.jimple.IntConstant)
            && ((soot.jimple.IntConstant)sootCond).value == 1;
    return !alwaysTrue;
}
/**
* If Stmts Creation - only add line-number tags to if (the other
* stmts needing tags are created elsewhere
*/
/**
 * If Stmts Creation - only add line-number tags to the if (the other
 * stmts needing tags are created elsewhere). Emits:
 *   if (!cond) goto noop1; consequent; goto noop2; noop1: [alternative]; noop2:
 */
private void createIf(polyglot.ast.If ifExpr){
// handle cond
polyglot.ast.Expr condition = ifExpr.cond();
soot.Value sootCond = createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
// non-condition value: jump past the consequent when it equals 0 (false)
sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
// invert the condition so the branch skips the consequent when false
sootCond = reverseCondition((soot.jimple.ConditionExpr)sootCond);
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
// add if (noop1 marks the start of the alternative / end of consequent)
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
if (needIf) {
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
body.getUnits().add(ifStmt);
// add line and pos tags
Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
Util.addLnPosTags(ifStmt, condition.position());
}
// add consequence
polyglot.ast.Stmt consequence = ifExpr.consequent();
createStmt(consequence);
// after the consequent, skip over the alternative to noop2
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(goto1);
body.getUnits().add(noop1);
// handle alternative
polyglot.ast.Stmt alternative = ifExpr.alternative();
if (alternative != null){
createStmt(alternative);
}
body.getUnits().add(noop2);
}
/**
* While Stmts Creation
*/
/**
 * While Stmts Creation. Layout: goto cond; body-start (noop1): body;
 * cond (noop2 + continue-noop): if (cond) goto noop1; end (break-noop).
 * The condition is tested at the bottom, so the initial goto jumps to it.
 */
private void createWhile(polyglot.ast.While whileStmt){
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
// these are for break and continue
endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
// handle body (noop2 marks the condition; jump there first)
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(goto1);
body.getUnits().add(noop1);
createStmt(whileStmt.body());
body.getUnits().add(noop2);
// handle cond (continue jumps land here, just before the test)
body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
polyglot.ast.Expr condition = whileStmt.cond();
soot.Value sootCond = createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
// non-condition value: loop again while it is non-zero (true)
sootCond = soot.jimple.Jimple.v().newNeExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
if (needIf){
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
body.getUnits().add(ifStmt);
Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
Util.addLnPosTags(ifStmt, condition.position());
}
else {
// always-true condition: unconditional back edge
soot.jimple.GotoStmt gotoIf = soot.jimple.Jimple.v().newGotoStmt(noop1);
body.getUnits().add(gotoIf);
}
// end of loop: break jumps land here
body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
* DoWhile Stmts Creation
*/
/**
 * DoWhile Stmts Creation. Layout: body-start (noop1): body;
 * cond (continue-noop): if (cond) goto noop1; end (break-noop).
 * Unlike while, the body executes once before the first test.
 */
private void createDo(polyglot.ast.Do doStmt){
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop1);
// these are for break and continue
endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
// handle body
createStmt(doStmt.body());
// handle cond (continue jumps land here, just before the test)
body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
polyglot.ast.Expr condition = doStmt.cond();
soot.Value sootCond = createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
// non-condition value: loop again while it is non-zero (true)
sootCond = soot.jimple.Jimple.v().newNeExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
if (needIf){
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
body.getUnits().add(ifStmt);
// NOTE(review): uses addPosTag here where createWhile/createIf use
// addLnPosTags for the condition box - possibly an oversight; confirm
// before changing.
Util.addPosTag(ifStmt.getConditionBox(), condition.position());
Util.addLnPosTags(ifStmt, condition.position());
}
else {
// always-true condition: unconditional back edge
soot.jimple.GotoStmt gotoIf = soot.jimple.Jimple.v().newGotoStmt(noop1);
body.getUnits().add(gotoIf);
}
// end of loop: break jumps land here
body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
* For Loop Stmts Creation
*/
/**
 * For Loop Stmts Creation. Layout: inits; goto cond; body-start (noop1):
 * body; continue-noop; iters; cond (noop2): if (cond) goto noop1;
 * end (break-noop). A missing condition means loop forever (plain goto).
 */
private void createForLoop(polyglot.ast.For forStmt){
// these are for break and continue
endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
// handle for inits
Iterator initsIt = forStmt.inits().iterator();
while (initsIt.hasNext()){
createStmt((polyglot.ast.Stmt)initsIt.next());
}
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
// handle body (noop2 marks the condition; jump there first)
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(goto1);
body.getUnits().add(noop1);
createStmt(forStmt.body());
// handle continue (continue jumps land here, before the iters)
body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
// handle iters
Iterator itersIt = forStmt.iters().iterator();
while (itersIt.hasNext()){
createStmt((polyglot.ast.Stmt)itersIt.next());
}
body.getUnits().add(noop2);
// handle cond
polyglot.ast.Expr condition = forStmt.cond();
if (condition != null) {
soot.Value sootCond = createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
// non-condition value: loop again while it is non-zero (true)
sootCond = soot.jimple.Jimple.v().newNeExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
if (needIf){
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
// add cond
body.getUnits().add(ifStmt);
// add line and pos tags
Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
Util.addLnPosTags(ifStmt, condition.position());
}
else {
// always-true condition: unconditional back edge
soot.jimple.GotoStmt gotoIf = soot.jimple.Jimple.v().newGotoStmt(noop1);
body.getUnits().add(gotoIf);
}
}
else {
// no condition: for(;;) - unconditional back edge
soot.jimple.Stmt goto2 = soot.jimple.Jimple.v().newGotoStmt(noop1);
body.getUnits().add(goto2);
}
// end of loop: break jumps land here
body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
* Local Decl Creation
*/
/**
 * Local Decl Creation: declares the Jimple local and, when an initializer
 * is present, emits the initializing assignment with position tags.
 */
private void createLocalDecl(polyglot.ast.LocalDecl localDecl) {
    String name = localDecl.name();
    polyglot.types.LocalInstance localInst = localDecl.localInstance();
    soot.Value lhs = createLocal(localInst);
    polyglot.ast.Expr expr = localDecl.init();
    // no initializer: the declaration alone needs no Jimple stmt
    if (expr != null) {
        soot.Value rhs;
        // array initializers need their own translation into a temp local
        if (expr instanceof polyglot.ast.ArrayInit){
            rhs = getArrayInitLocal((polyglot.ast.ArrayInit)expr, localInst.type());
        }
        else {
            rhs = createExpr(expr);
        }
        // a raw condition expression cannot be assigned; materialize it as 0/1
        if (rhs instanceof soot.jimple.ConditionExpr) {
            rhs = handleCondBinExpr((soot.jimple.ConditionExpr)rhs);
        }
        soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
        body.getUnits().add(stmt);
        Util.addLineTag(stmt, localDecl);
        Util.addLnPosTags(stmt, localDecl.position());
        // this is a special case for position tags
        // (the original re-checked expr != null inside this branch, which is
        // always true here; the dead else-branch has been removed)
        if (localDecl.position() != null){
            Util.addLnPosTags(stmt.getLeftOpBox(), localDecl.position().line(), localDecl.position().endLine(), localDecl.position().endColumn()-name.length(), localDecl.position().endColumn());
            Util.addLnPosTags(stmt, localDecl.position().line(), expr.position().endLine(), localDecl.position().column(), expr.position().endColumn());
        }
        Util.addLnPosTags(stmt.getRightOpBox(), expr.position());
    }
}
/**
* Switch Stmts Creation
*/
/**
 * Switch Stmts Creation: translates a polyglot switch into either a Jimple
 * lookupswitch or tableswitch (chosen by isLookupSwitch), with a nop per
 * case as its jump target and the case bodies emitted in source order.
 * (Unused locals lowIndex/highIndex/targetsIt from the original removed.)
 */
private void createSwitch(polyglot.ast.Switch switchStmt) {
    polyglot.ast.Expr value = switchStmt.expr();
    soot.Value sootValue = createExpr(value);
    soot.jimple.Stmt defaultTarget = null;
    // parallel arrays: caseArray[i]'s jump target is targetsArray[i]
    polyglot.ast.Case [] caseArray = new polyglot.ast.Case[switchStmt.elements().size()];
    soot.jimple.Stmt [] targetsArray = new soot.jimple.Stmt[switchStmt.elements().size()];
    ArrayList targets = new ArrayList();
    HashMap targetsMap = new HashMap();
    int counter = 0;
    Iterator it = switchStmt.elements().iterator();
    while (it.hasNext()) {
        Object next = it.next();
        if (next instanceof polyglot.ast.Case) {
            soot.jimple.Stmt noop = soot.jimple.Jimple.v().newNopStmt();
            if (!((polyglot.ast.Case)next).isDefault()){
                targets.add(noop);
                caseArray[counter] = (polyglot.ast.Case)next;
                targetsArray[counter] = noop;
                counter++;
                targetsMap.put(next, noop);
            }
            else {
                defaultTarget = noop;
            }
        }
    }
    // sort cases (and their targets in parallel) ascending by case value,
    // as the Jimple switch constructs expect sorted keys
    for (int i = 0; i < counter; i++) {
        for (int j = i+1; j < counter; j++) {
            if (caseArray[j].value() < caseArray[i].value()) {
                polyglot.ast.Case tempCase = caseArray[i];
                soot.jimple.Stmt tempTarget = targetsArray[i];
                caseArray[i] = caseArray[j];
                targetsArray[i] = targetsArray[j];
                caseArray[j] = tempCase;
                targetsArray[j] = tempTarget;
            }
        }
    }
    ArrayList sortedTargets = new ArrayList();
    for (int i = 0; i < counter; i++) {
        sortedTargets.add(targetsArray[i]);
    }
    // deal with default: without an explicit default the target is a nop
    // placed at the very end of the switch (fall-through)
    boolean hasDefaultTarget = true;
    if (defaultTarget == null) {
        soot.jimple.Stmt noop = soot.jimple.Jimple.v().newNopStmt();
        defaultTarget = noop;
        hasDefaultTarget = false;
    }
    // lookup or tableswitch
    soot.jimple.Stmt sootSwitchStmt;
    if (isLookupSwitch(switchStmt)) {
        ArrayList values = new ArrayList();
        for (int i = 0; i < counter; i++) {
            if (!caseArray[i].isDefault()) {
                values.add(soot.jimple.IntConstant.v((int)caseArray[i].value()));
            }
        }
        soot.jimple.LookupSwitchStmt lookupStmt = soot.jimple.Jimple.v().newLookupSwitchStmt(sootValue, values, sortedTargets, defaultTarget);
        Util.addLnPosTags(lookupStmt.getKeyBox(), value.position());
        sootSwitchStmt = lookupStmt;
    }
    else {
        // tableswitch needs the low and high case values
        long lowVal = 0;
        long highVal = 0;
        boolean unknown = true;
        it = switchStmt.elements().iterator();
        while (it.hasNext()){
            Object next = it.next();
            if (next instanceof polyglot.ast.Case) {
                if (!((polyglot.ast.Case)next).isDefault()){
                    long temp = ((polyglot.ast.Case)next).value();
                    if (unknown){
                        highVal = temp;
                        lowVal = temp;
                        unknown = false;
                    }
                    if (temp > highVal) {
                        highVal = temp;
                    }
                    if (temp < lowVal) {
                        lowVal = temp;
                    }
                }
            }
        }
        soot.jimple.TableSwitchStmt tableStmt = soot.jimple.Jimple.v().newTableSwitchStmt(sootValue, (int)lowVal, (int)highVal, sortedTargets, defaultTarget);
        Util.addLnPosTags(tableStmt.getKeyBox(), value.position());
        sootSwitchStmt = tableStmt;
    }
    body.getUnits().add(sootSwitchStmt);
    Util.addLnPosTags(sootSwitchStmt, switchStmt.position());
    // break inside the switch jumps to this end noop
    endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
    // emit the case targets and case bodies in source order
    it = switchStmt.elements().iterator();
    while (it.hasNext()){
        Object next = it.next();
        if (next instanceof polyglot.ast.Case) {
            if (!((polyglot.ast.Case)next).isDefault()){
                body.getUnits().add((soot.jimple.Stmt)targetsMap.get(next));
            }
            else {
                body.getUnits().add(defaultTarget);
            }
        }
        else {
            polyglot.ast.SwitchBlock blockStmt = (polyglot.ast.SwitchBlock)next;
            createBlock(blockStmt);
        }
    }
    if (!hasDefaultTarget) {
        body.getUnits().add(defaultTarget);
    }
    body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
* Determine if switch should be lookup or table - this doesn't
* always get the same result as javac
* lookup: non-table
* table: sequential (no gaps)
*/
/**
 * Determine if switch should be lookup or table - this doesn't always get
 * the same result as javac.
 * table: case values are sequential with no gaps; lookup: anything else.
 */
private boolean isLookupSwitch(polyglot.ast.Switch switchStmt){
    int min = 0;
    int max = 0;
    int numCases = 0;
    for (Iterator elemIt = switchStmt.elements().iterator(); elemIt.hasNext(); ) {
        Object elem = elemIt.next();
        if (!(elem instanceof polyglot.ast.Case)) continue;
        polyglot.ast.Case caseStmt = (polyglot.ast.Case)elem;
        if (caseStmt.isDefault()) continue;
        int caseValue = (int)caseStmt.value();
        if (numCases == 0 || caseValue <= min) {
            min = caseValue;
        }
        if (numCases == 0 || caseValue >= max) {
            max = caseValue;
        }
        numCases++;
    }
    // n sequential values span exactly (n-1); any other span means gaps
    return (numCases - 1) != (max - min);
}
/**
* Branch Stmts Creation
*/
/**
 * Branch Stmts Creation: break/continue, labeled or not. Unlabeled break
 * jumps to the innermost end-control noop; unlabeled continue to the
 * innermost cond-control noop; labeled forms use the label maps set up
 * by createLabeled.
 */
private void createBranch(polyglot.ast.Branch branchStmt){
// marker nop preceding the goto; presumably a stable anchor for later
// passes - TODO confirm before removing
body.getUnits().add(soot.jimple.Jimple.v().newNopStmt());
if (branchStmt.kind() == polyglot.ast.Branch.BREAK){
if (branchStmt.label() == null) {
// pop to read the top target, then push it back (the loop is still open)
soot.jimple.Stmt gotoEndNoop = (soot.jimple.Stmt)endControlNoop.pop();
soot.jimple.Stmt gotoEnd = soot.jimple.Jimple.v().newGotoStmt(gotoEndNoop);
endControlNoop.push(gotoEndNoop);
body.getUnits().add(gotoEnd);
Util.addLnPosTags(gotoEnd, branchStmt.position());
}
else {
soot.jimple.Stmt gotoLabel = soot.jimple.Jimple.v().newGotoStmt((soot.jimple.Stmt)labelBreakMap.get(branchStmt.label()));
body.getUnits().add(gotoLabel);
Util.addLnPosTags(gotoLabel, branchStmt.position());
}
}
else if (branchStmt.kind() == polyglot.ast.Branch.CONTINUE){
if (branchStmt.label() == null) {
// pop to read the top target, then push it back (the loop is still open)
soot.jimple.Stmt gotoCondNoop = (soot.jimple.Stmt)condControlNoop.pop();
soot.jimple.Stmt gotoCond = soot.jimple.Jimple.v().newGotoStmt(gotoCondNoop);
condControlNoop.push(gotoCondNoop);
body.getUnits().add(gotoCond);
Util.addLnPosTags(gotoCond, branchStmt.position());
}
else {
soot.jimple.Stmt gotoLabel = soot.jimple.Jimple.v().newGotoStmt((soot.jimple.Stmt)labelContinueMap.get(branchStmt.label()));
body.getUnits().add(gotoLabel);
Util.addLnPosTags(gotoLabel, branchStmt.position());
}
}
}
/**
* Labeled Stmt Creation
*/
/**
 * Labeled Stmt Creation: maps the label to a start noop (target of
 * continue-with-label) and an end noop (target of break-with-label), so
 * createBranch has something to look up, then translates the statement
 * between them.
 */
private void createLabeled(polyglot.ast.Labeled labeledStmt){
    String label = labeledStmt.label();
    // start noop: continue <label> jumps back here
    soot.jimple.Stmt startNoop = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(startNoop);
    // the label maps are created lazily on first use
    if (labelBreakMap == null) {
        labelBreakMap = new HashMap();
    }
    if (labelContinueMap == null) {
        labelContinueMap = new HashMap();
    }
    labelContinueMap.put(label, startNoop);
    // end noop: break <label> jumps forward here
    soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
    labelBreakMap.put(label, endNoop);
    createStmt(labeledStmt.statement());
    body.getUnits().add(endNoop);
}
/**
* Assert Stmt Creation
*/
/**
 * Assert Stmt Creation. Emits:
 *   if ($assertionsDisabled != 0) goto end;
 *   if (cond) goto end;
 *   $e = new AssertionError([message]); throw $e;
 *   end:
 * The AssertionError constructor overload is picked from the message
 * expression's type (primitives keep their own overloads; short/byte
 * widen to the int overload; everything else uses Object).
 */
private void createAssert(polyglot.ast.Assert assertStmt) {
// check if assertions are disabled
soot.Local testLocal = lg.generateLocal(soot.BooleanType.v());
soot.SootField assertField = body.getMethod().getDeclaringClass().getField("$assertionsDisabled", soot.BooleanType.v());
soot.jimple.FieldRef assertFieldRef = soot.jimple.Jimple.v().newStaticFieldRef(assertField);
soot.jimple.AssignStmt fieldAssign = soot.jimple.Jimple.v().newAssignStmt(testLocal, assertFieldRef);
body.getUnits().add(fieldAssign);
// nop1 marks the end of the whole assert; disabled assertions skip everything
soot.jimple.NopStmt nop1 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.ConditionExpr cond1 = soot.jimple.Jimple.v().newNeExpr(testLocal, soot.jimple.IntConstant.v(0));
soot.jimple.IfStmt testIf = soot.jimple.Jimple.v().newIfStmt(cond1, nop1);
body.getUnits().add(testIf);
// actual cond test: a true condition skips the failure code
soot.Value sootCond = createExpr(assertStmt.cond());
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
// NOTE: EqExpr with 0 here jumps to the end when the value is 0; combined
// with the surrounding layout this skips the failure code when the
// assertion holds
sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
if (needIf){
// add if
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, nop1);
body.getUnits().add(ifStmt);
Util.addLnPosTags(ifStmt.getConditionBox(), assertStmt.cond().position());
Util.addLnPosTags(ifStmt, assertStmt.position());
}
// assertion failure code: new AssertionError(...) then throw
soot.Local failureLocal = lg.generateLocal(soot.RefType.v("java.lang.AssertionError"));
soot.jimple.NewExpr newExpr = soot.jimple.Jimple.v().newNewExpr(soot.RefType.v("java.lang.AssertionError"));
soot.jimple.AssignStmt newAssign = soot.jimple.Jimple.v().newAssignStmt(failureLocal, newExpr);
body.getUnits().add(newAssign);
soot.SootMethod methToInvoke;
ArrayList paramTypes = new ArrayList();
ArrayList params = new ArrayList();
if (assertStmt.errorMessage() != null){
// pick the AssertionError constructor overload matching the message type
soot.Value errorExpr = createExpr(assertStmt.errorMessage());
soot.Type errorType = errorExpr.getType();
if (errorType instanceof soot.IntType) {
paramTypes.add(soot.IntType.v());
}
else if (errorType instanceof soot.LongType){
paramTypes.add(soot.LongType.v());
}
else if (errorType instanceof soot.FloatType){
paramTypes.add(soot.FloatType.v());
}
else if (errorType instanceof soot.DoubleType){
paramTypes.add(soot.DoubleType.v());
}
else if (errorType instanceof soot.CharType){
paramTypes.add(soot.CharType.v());
}
else if (errorType instanceof soot.BooleanType){
paramTypes.add(soot.BooleanType.v());
}
else if (errorType instanceof soot.ShortType){
// no short overload: widen to int
paramTypes.add(soot.IntType.v());
}
else if (errorType instanceof soot.ByteType){
// no byte overload: widen to int
paramTypes.add(soot.IntType.v());
}
else {
paramTypes.add(soot.Scene.v().getSootClass("java.lang.Object").getType());
}
params.add(errorExpr);
}
methToInvoke = soot.Scene.v().getSootClass("java.lang.AssertionError").getMethod("<init>", paramTypes, soot.VoidType.v());
soot.jimple.SpecialInvokeExpr invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(failureLocal, methToInvoke, params);
soot.jimple.InvokeStmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(invokeExpr);
body.getUnits().add(invokeStmt);
soot.jimple.ThrowStmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(failureLocal);
body.getUnits().add(throwStmt);
// end
body.getUnits().add(nop1);
}
/**
* Synchronized Stmt Creation
*/
/**
 * Synchronized Stmt Creation. Emits entermonitor/exitmonitor around the
 * body, plus a catch-all Throwable handler that exits the monitor before
 * rethrowing, and registers the corresponding exception (trap) ranges.
 * The monitor value is pushed on monitorStack so createReturn can emit
 * monitor exits before returning from inside the block.
 */
private void createSynchronized(polyglot.ast.Synchronized synchStmt) {
soot.Value sootExpr = createExpr(synchStmt.expr());
soot.jimple.EnterMonitorStmt enterMon = soot.jimple.Jimple.v().newEnterMonitorStmt(sootExpr);
body.getUnits().add(enterMon);
// monitorStack is created lazily on the first synchronized block
if (monitorStack == null){
monitorStack = new Stack();
}
monitorStack.push(sootExpr);
Util.addLnPosTags(enterMon.getOpBox(), synchStmt.expr().position());
Util.addLnPosTags(enterMon, synchStmt.expr().position());
// startNoop..endNoop delimit the protected (trap) region
soot.jimple.Stmt startNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(startNoop);
createBlock(synchStmt.body());
// normal exit path: release the monitor and jump past the handler
soot.jimple.ExitMonitorStmt exitMon = soot.jimple.Jimple.v().newExitMonitorStmt(sootExpr);
body.getUnits().add(exitMon);
monitorStack.pop();
Util.addLnPosTags(exitMon.getOpBox(), synchStmt.expr().position());
Util.addLnPosTags(exitMon, synchStmt.expr().position());
soot.jimple.Stmt endSynchNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt gotoEnd = soot.jimple.Jimple.v().newGotoStmt(endSynchNoop);
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(endNoop);
body.getUnits().add(gotoEnd);
soot.jimple.Stmt catchAllBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchAllBeforeNoop);
// catch all: bind the caught Throwable
soot.Local formalLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
soot.jimple.CaughtExceptionRef exceptRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(formalLocal, exceptRef);
body.getUnits().add(stmt);
// catch: release the monitor on the exceptional path too
soot.jimple.Stmt catchBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchBeforeNoop);
soot.Local local = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(local, formalLocal);
body.getUnits().add(assign);
soot.jimple.ExitMonitorStmt catchExitMon = soot.jimple.Jimple.v().newExitMonitorStmt(sootExpr);
body.getUnits().add(catchExitMon);
Util.addLnPosTags(catchExitMon.getOpBox(), synchStmt.expr().position());
soot.jimple.Stmt catchAfterNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchAfterNoop);
// throw: rethrow the original Throwable after releasing the monitor
soot.jimple.Stmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(local);
body.getUnits().add(throwStmt);
body.getUnits().add(endSynchNoop);
// trap the body, and the handler's own monitor-exit, with the same handler
addToExceptionList(startNoop, endNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
addToExceptionList(catchBeforeNoop, catchAfterNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
}
/**
* Return Stmts Creation
*/
/**
 * Return Stmts Creation. Before the actual return it must:
 *  - exit every monitor currently held (restoring the stack afterwards,
 *    since the enclosing synchronized blocks are still open);
 *  - inline any pending finally blocks when returning from inside a
 *    try or catch block, skipping the return entirely if the finally
 *    itself returns.
 */
private void createReturn(polyglot.ast.Return retStmt) {
polyglot.ast.Expr expr = retStmt.expr();
soot.Value sootLocal = null;
// evaluate the return expression first (before monitor exits / finally)
if (expr != null){
sootLocal = createExpr(expr);
}
// handle monitor exits before return if necessary
if (monitorStack != null){
// pop to emit an exit for each held monitor, then restore the stack
Stack putBack = new Stack();
while (!monitorStack.isEmpty()){
soot.Local exitVal = (soot.Local)monitorStack.pop();
putBack.push(exitVal);
soot.jimple.ExitMonitorStmt emStmt = soot.jimple.Jimple.v().newExitMonitorStmt(exitVal);
body.getUnits().add(emStmt);
}
while(!putBack.isEmpty()){
monitorStack.push(putBack.pop());
}
}
//handle finally blocks before return if inside try block
if (tryStack != null && !tryStack.isEmpty()){
// pop while inlining the finally so nested returns inside it don't
// re-inline the same block; push back afterwards
polyglot.ast.Try currentTry = (polyglot.ast.Try)tryStack.pop();
if (currentTry.finallyBlock() != null){
createBlock(currentTry.finallyBlock());
tryStack.push(currentTry);
// if the finally block contains a return don't create the other return
ReturnStmtChecker rsc = new ReturnStmtChecker();
currentTry.finallyBlock().visit(rsc);
if (rsc.hasRet()){
return;
}
}
else {
tryStack.push(currentTry);
}
}
//handle finally blocks before return if inside catch block
if (catchStack != null && !catchStack.isEmpty()){
// same pop/inline/push-back dance as for the try stack
polyglot.ast.Try currentTry = (polyglot.ast.Try)catchStack.pop();
if (currentTry.finallyBlock() != null){
createBlock(currentTry.finallyBlock());
catchStack.push(currentTry);
// if the finally block contains a return don't create the other return
ReturnStmtChecker rsc = new ReturnStmtChecker();
currentTry.finallyBlock().visit(rsc);
if (rsc.hasRet()){
return;
}
}
else {
catchStack.push(currentTry);
}
}
// return
if (expr == null) {
soot.jimple.Stmt retStmtVoid = soot.jimple.Jimple.v().newReturnVoidStmt();
body.getUnits().add(retStmtVoid);
Util.addLnPosTags(retStmtVoid, retStmt.position());
}
else {
// a raw condition expression cannot be returned; materialize it as 0/1
if (sootLocal instanceof soot.jimple.ConditionExpr) {
sootLocal = handleCondBinExpr((soot.jimple.ConditionExpr)sootLocal);
}
soot.jimple.ReturnStmt retStmtLocal = soot.jimple.Jimple.v().newReturnStmt(sootLocal);
body.getUnits().add(retStmtLocal);
Util.addLnPosTags(retStmtLocal.getOpBox(), expr.position());
Util.addLnPosTags(retStmtLocal, retStmt.position());
}
}
/**
* Throw Stmt Creation
*/
/**
 * Throw Stmt Creation: evaluates the thrown expression and emits the
 * corresponding Jimple throw, tagging it with source positions.
 */
private void createThrow(polyglot.ast.Throw throwStmt){
    soot.Value thrownValue = createExpr(throwStmt.expr());
    soot.jimple.ThrowStmt jimpleThrow = soot.jimple.Jimple.v().newThrowStmt(thrownValue);
    body.getUnits().add(jimpleThrow);
    // tag both the statement and the operand box with line/position info
    Util.addLnPosTags(jimpleThrow, throwStmt.position());
    Util.addLnPosTags(jimpleThrow.getOpBox(), throwStmt.expr().position());
}
/**
* Try Stmt Creation
*/
/**
 * Try Stmt Creation: dispatches to the plain try/catch translation or the
 * try/catch/finally translation depending on whether a finally block exists.
 */
private void createTry(polyglot.ast.Try tryStmt) {
    if (tryStmt.finallyBlock() != null) {
        createTryCatchFinally(tryStmt);
    } else {
        createTryCatch(tryStmt);
    }
}
/**
* handles try/catch (try/catch/finally is separate for simplicity)
*/
// Translates a try/catch (no finally) into Jimple. Nop statements bracket
// the try body (noop1..noop2) and mark each handler entry (noop3); traps
// connecting them are registered via addToExceptionList.
private void createTryCatch(polyglot.ast.Try tryStmt){
// try
polyglot.ast.Block tryBlock = tryStmt.tryBlock();
// this nop is for the fromStmt of try
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop1);
// record that subsequent statements are inside this try so that e.g.
// createReturn can replay finally blocks (none here) and nesting
if (tryStack == null){
tryStack = new Stack();
}
tryStack.push(tryStmt);
createBlock(tryBlock);
tryStack.pop();
// this nop is for the toStmt of try
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop2);
// create end nop for after entire try/catch
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
// normal completion of the try body jumps over all handlers
soot.jimple.Stmt tryEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(tryEndGoto);
Iterator it = tryStmt.catchBlocks().iterator();
while (it.hasNext()) {
// noop3 is the handler entry point used as the trap target
soot.jimple.Stmt noop3 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop3);
// create catch stmts
polyglot.ast.Catch catchBlock = (polyglot.ast.Catch)it.next();
// create catch ref
createCatchFormal(catchBlock.formal());
if (catchStack == null){
catchStack = new Stack();
}
catchStack.push(tryStmt);
createBlock(catchBlock.body());
catchStack.pop();
// each handler also falls through to the end of the construct
soot.jimple.Stmt catchEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(catchEndGoto);
soot.Type sootType = Util.getSootType(catchBlock.catchType());
addToExceptionList(noop1, noop2, noop3, soot.Scene.v().getSootClass(sootType.toString()));
}
body.getUnits().add(endNoop);
}
/**
* handles try/catch/finally (try/catch is separate for simplicity)
*/
// Translates try/catch/finally into Jimple. The finally block is duplicated:
// one copy per normal exit path (try body, each catch body) and one for the
// catch-all handler that rethrows. gotoMap pairs each finally entry nop with
// the statement to jump back to after that finally copy runs.
private void createTryCatchFinally(polyglot.ast.Try tryStmt){
HashMap gotoMap = new HashMap();
// try
polyglot.ast.Block tryBlock = tryStmt.tryBlock();
// this nop is for the fromStmt of try
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop1);
if (tryStack == null){
tryStack = new Stack();
}
tryStack.push(tryStmt);
createBlock(tryBlock);
tryStack.pop();
// this nop is for the toStmt of try
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop2);
// create end nop for after entire try/catch
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
// to finally
soot.jimple.Stmt tryGotoFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(tryGotoFinallyNoop);
soot.jimple.Stmt tryFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt tryGotoFinally = soot.jimple.Jimple.v().newGotoStmt(tryFinallyNoop);
body.getUnits().add(tryGotoFinally);
// goto end stmts
soot.jimple.Stmt beforeEndGotoNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(beforeEndGotoNoop);
soot.jimple.Stmt tryEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(tryEndGoto);
// after the try's finally copy runs, control resumes at beforeEndGotoNoop
gotoMap.put(tryFinallyNoop, beforeEndGotoNoop);
// catch section
soot.jimple.Stmt catchAllBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
Iterator it = tryStmt.catchBlocks().iterator();
while (it.hasNext()) {
soot.jimple.Stmt noop3 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop3);
// create catch stmts
polyglot.ast.Catch catchBlock = (polyglot.ast.Catch)it.next();
// create catch ref
soot.jimple.Stmt catchRefNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchRefNoop);
createCatchFormal(catchBlock.formal());
soot.jimple.Stmt catchStmtsNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchStmtsNoop);
if (catchStack == null){
catchStack = new Stack();
}
catchStack.push(tryStmt);
createBlock(catchBlock.body());
catchStack.pop();
// to finally
soot.jimple.Stmt catchGotoFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchGotoFinallyNoop);
soot.jimple.Stmt catchFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt catchGotoFinally = soot.jimple.Jimple.v().newGotoStmt(catchFinallyNoop);
body.getUnits().add(catchGotoFinally);
// goto end stmts
soot.jimple.Stmt beforeCatchEndGotoNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(beforeCatchEndGotoNoop);
soot.jimple.Stmt catchEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(catchEndGoto);
// this catch's finally copy resumes at beforeCatchEndGotoNoop
gotoMap.put(catchFinallyNoop, beforeCatchEndGotoNoop);
soot.Type sootType = Util.getSootType(catchBlock.catchType());
// trap 1: try body -> this handler; trap 2: handler body -> catch-all,
// so an exception inside the handler still runs the finally
addToExceptionList(noop1, noop2, noop3, soot.Scene.v().getSootClass(sootType.toString()));
addToExceptionList(catchStmtsNoop, beforeCatchEndGotoNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
}
// catch all ref
soot.Local formalLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
body.getUnits().add(catchAllBeforeNoop);
soot.jimple.CaughtExceptionRef exceptRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(formalLocal, exceptRef);
body.getUnits().add(stmt);
// catch all assign
soot.jimple.Stmt beforeCatchAllAssignNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(beforeCatchAllAssignNoop);
soot.Local catchAllAssignLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
soot.jimple.Stmt catchAllAssign = soot.jimple.Jimple.v().newAssignStmt(catchAllAssignLocal, formalLocal);
body.getUnits().add(catchAllAssign);
// catch all finally
soot.jimple.Stmt catchAllFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt catchAllGotoFinally = soot.jimple.Jimple.v().newGotoStmt(catchAllFinallyNoop);
body.getUnits().add(catchAllGotoFinally);
// catch all throw
soot.jimple.Stmt catchAllBeforeThrowNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchAllBeforeThrowNoop);
// rethrow the saved exception after the catch-all's finally copy runs
soot.jimple.Stmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(catchAllAssignLocal);
throwStmt.addTag(new soot.tagkit.ThrowCreatedByCompilerTag());
body.getUnits().add(throwStmt);
gotoMap.put(catchAllFinallyNoop, catchAllBeforeThrowNoop);
// catch all goto end
soot.jimple.Stmt catchAllGotoEnd = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(catchAllGotoEnd);
addToExceptionList(beforeCatchAllAssignNoop, catchAllBeforeThrowNoop ,catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
// create finally's
// emit one copy of the finally block per entry nop recorded in gotoMap,
// each followed by a goto back to its recorded resume point
Iterator finallyIt = gotoMap.keySet().iterator();
while (finallyIt.hasNext()) {
soot.jimple.Stmt noopStmt = (soot.jimple.Stmt)finallyIt.next();
body.getUnits().add(noopStmt);
createBlock(tryStmt.finallyBlock());
soot.jimple.Stmt backToStmt = (soot.jimple.Stmt)gotoMap.get(noopStmt);
soot.jimple.Stmt backToGoto = soot.jimple.Jimple.v().newGotoStmt(backToStmt);
body.getUnits().add(backToGoto);
}
body.getUnits().add(endNoop);
addToExceptionList(noop1, beforeEndGotoNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
}
/**
* add exceptions to a list that gets added at end of method
*/
/**
 * Records a trap (exception handler range) in the method-level exception
 * table, lazily creating the table; the accumulated traps are installed
 * into the body at the end of method generation.
 */
private void addToExceptionList(soot.jimple.Stmt from, soot.jimple.Stmt to, soot.jimple.Stmt with, soot.SootClass exceptionClass) {
    if (exceptionTable == null) {
        exceptionTable = new ArrayList();
    }
    exceptionTable.add(soot.jimple.Jimple.v().newTrap(exceptionClass, from, to, with));
}
/**
* Expression Creation
*/
/**
 * Expression Creation: dispatches on the concrete polyglot expression type
 * and returns a soot.Value (typically a Local) holding the result.
 *
 * @param expr the polyglot expression to translate
 * @return the translated soot value
 * @throws RuntimeException if the expression type is not handled
 */
private soot.Value createExpr(polyglot.ast.Expr expr){
    if (expr instanceof polyglot.ast.Assign) {
        return getAssignLocal((polyglot.ast.Assign)expr);
    }
    else if (expr instanceof polyglot.ast.Lit) {
        return createLiteral((polyglot.ast.Lit)expr);
    }
    else if (expr instanceof polyglot.ast.Local) {
        return getLocal((polyglot.ast.Local)expr);
    }
    else if (expr instanceof polyglot.ast.Binary) {
        return getBinaryLocal((polyglot.ast.Binary)expr);
    }
    else if (expr instanceof polyglot.ast.Unary) {
        return getUnaryLocal((polyglot.ast.Unary)expr);
    }
    else if (expr instanceof polyglot.ast.Cast) {
        return getCastLocal((polyglot.ast.Cast)expr);
    }
    //else if (expr instanceof polyglot.ast.ArrayInit) {
    // array init are special and get created elsewhere
    else if (expr instanceof polyglot.ast.ArrayAccess) {
        return getArrayRefLocal((polyglot.ast.ArrayAccess)expr);
    }
    else if (expr instanceof polyglot.ast.NewArray) {
        return getNewArrayLocal((polyglot.ast.NewArray)expr);
    }
    else if (expr instanceof polyglot.ast.Call) {
        return getCallLocal((polyglot.ast.Call)expr);
    }
    else if (expr instanceof polyglot.ast.New) {
        return getNewLocal((polyglot.ast.New)expr);
    }
    else if (expr instanceof polyglot.ast.Special) {
        return getSpecialLocal((polyglot.ast.Special)expr);
    }
    else if (expr instanceof polyglot.ast.Instanceof) {
        return getInstanceOfLocal((polyglot.ast.Instanceof)expr);
    }
    else if (expr instanceof polyglot.ast.Conditional) {
        return getConditionalLocal((polyglot.ast.Conditional)expr);
    }
    else if (expr instanceof polyglot.ast.Field) {
        return getFieldLocal((polyglot.ast.Field)expr);
    }
    else {
        // include the offending expression in the exception message rather
        // than printing it separately to stdout (was a debug println)
        throw new RuntimeException("Unhandled Expression: " + expr);
    }
}
// Handles assignment to a private field of an outer class from an inner
// class: computes the right-hand side, synthesizes (or uses) a static
// access$NN00 setter on the field's declaring class, and invokes it.
// Returns a local holding the assigned value.
private soot.Local handlePrivateFieldSet(polyglot.ast.Assign assign){
polyglot.ast.Field fLeft = (polyglot.ast.Field)assign.left();
//soot.Value right = createExpr(assign.right());
// if assign is not = but +=, -=, *=, /=, >>=, >>>-, <<=, %=,
// |= &= or ^= then compute it all into a local first
//if (assign.operator() != polyglot.ast.Assign.ASSIGN){
// in this cas can cast to local (never a string const here
// as it has to be a lhs
soot.Value right;
if (assign.operator() == polyglot.ast.Assign.ASSIGN){
right = getSimpleAssignRightLocal(assign);
}
else if ((assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) && assign.type().toString().equals("java.lang.String")){
// += on a String becomes a StringBuffer append chain
right = getStringConcatAssignRightLocal(assign);
}
else {
// compound assignment: read the current field value, then compute
// e.g. old op rhs into a local
soot.Local leftLocal = (soot.Local)getFieldLocal(fLeft);
right = getAssignRightLocal(assign, leftLocal);
}
//else {
// right = createExpr(assign.right());
soot.SootClass containClass = ((soot.RefType)Util.getSootType(fLeft.target().type())).getSootClass();
soot.SootMethod methToUse = addSetAccessMeth(containClass, fLeft, right);
ArrayList params = new ArrayList();
if (!fLeft.flags().isStatic()){
// this is the this ref if needed
params.add(getThis(Util.getSootType(fLeft.target().type())));
}
params.add(right);
soot.jimple.InvokeExpr invoke = soot.jimple.Jimple.v().newStaticInvokeExpr(methToUse, params);
soot.Local retLocal = lg.generateLocal(right.getType());
soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, invoke);
body.getUnits().add(assignStmt);
return retLocal;
}
/**
 * Creates a synthetic static access$NN00 method on the given class that
 * sets the given (private) field, so inner classes can assign it.
 * The method takes the receiver (for instance fields) plus the new value,
 * and its body is produced by PrivateFieldSetMethodSource.
 */
private soot.SootMethod addSetAccessMeth(soot.SootClass conClass, polyglot.ast.Field field, soot.Value param){
    // unique accessor name, e.g. access$300
    String accessorName = "access$" + soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter() + "00";
    ArrayList sigTypes = new ArrayList();
    if (!field.flags().isStatic()){
        // instance fields need the receiver as the first parameter
        sigTypes.add(Util.getSootType(field.target().type()));
    }
    sigTypes.add(param.getType());
    soot.SootMethod accessor = new soot.SootMethod(accessorName, sigTypes, param.getType(), soot.Modifier.STATIC);
    PrivateFieldSetMethodSource bodySource = new PrivateFieldSetMethodSource();
    bodySource.fieldName(field.name());
    bodySource.fieldType(Util.getSootType(field.type()));
    bodySource.setFieldInst(field.fieldInstance());
    conClass.addMethod(accessor);
    accessor.setActiveBody(bodySource.getBody(accessor, null));
    return accessor;
}
/**
 * Computes the right-hand-side value for an assignment: plain assignment,
 * String += (concat), or a compound arithmetic/bitwise assignment that
 * needs the current left value.
 */
private soot.Value getAssignRightLocal(polyglot.ast.Assign assign, soot.Local leftLocal){
    polyglot.ast.Assign.Operator op = assign.operator();
    if (op == polyglot.ast.Assign.ASSIGN){
        return getSimpleAssignRightLocal(assign);
    }
    boolean stringConcat = (op == polyglot.ast.Assign.ADD_ASSIGN)
            && assign.type().toString().equals("java.lang.String");
    if (stringConcat){
        return getStringConcatAssignRightLocal(assign);
    }
    return getComplexAssignRightLocal(assign, leftLocal);
}
/**
 * Evaluates the right-hand side of a plain assignment; condition
 * expressions are materialized into a 0/1 local first, since Jimple
 * cannot assign a ConditionExpr directly.
 */
private soot.Value getSimpleAssignRightLocal(polyglot.ast.Assign assign){
    soot.Value rhs = createExpr(assign.right());
    if (rhs instanceof soot.jimple.ConditionExpr) {
        return handleCondBinExpr((soot.jimple.ConditionExpr)rhs);
    }
    return rhs;
}
/**
 * Builds the right-hand side of a String += assignment: appends both the
 * current left value and the right expression to a string buffer and
 * converts the buffer to a String local.
 */
private soot.Local getStringConcatAssignRightLocal(polyglot.ast.Assign assign){
    soot.Local buffer = (soot.Local)createStringBuffer(assign);
    generateAppends(assign.left(), buffer);
    generateAppends(assign.right(), buffer);
    return createToString(buffer, assign);
}
// Computes the right-hand side of a compound assignment (+=, -=, ...):
// evaluates the rhs, builds the matching Jimple binop of (leftLocal, rhs),
// assigns it into a fresh local and returns that local.
private soot.Local getComplexAssignRightLocal(polyglot.ast.Assign assign, soot.Local leftLocal){
soot.Value right = createExpr(assign.right());
// ConditionExpr cannot appear as a binop operand; turn it into a 0/1 local
if (right instanceof soot.jimple.ConditionExpr) {
right = handleCondBinExpr((soot.jimple.ConditionExpr)right);
}
// map each compound operator to its Jimple binary expression
soot.jimple.BinopExpr binop = null;
if (assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) {
binop = soot.jimple.Jimple.v().newAddExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.SUB_ASSIGN){
binop = soot.jimple.Jimple.v().newSubExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.MUL_ASSIGN) {
binop = soot.jimple.Jimple.v().newMulExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.DIV_ASSIGN) {
binop = soot.jimple.Jimple.v().newDivExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.MOD_ASSIGN) {
binop = soot.jimple.Jimple.v().newRemExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.SHL_ASSIGN) {
binop = soot.jimple.Jimple.v().newShlExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.SHR_ASSIGN) {
binop = soot.jimple.Jimple.v().newShrExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.USHR_ASSIGN) {
binop = soot.jimple.Jimple.v().newUshrExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.BIT_AND_ASSIGN) {
binop = soot.jimple.Jimple.v().newAndExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.BIT_OR_ASSIGN) {
binop = soot.jimple.Jimple.v().newOrExpr(leftLocal, right);
}
else if (assign.operator() == polyglot.ast.Assign.BIT_XOR_ASSIGN) {
binop = soot.jimple.Jimple.v().newXorExpr(leftLocal, right);
}
// NOTE(review): binop stays null for any operator not listed above,
// which would NPE in newAssignStmt — presumably all compound operators
// are covered; confirm against polyglot.ast.Assign's operator set
soot.Local retLocal = lg.generateLocal(leftLocal.getType());
soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, binop);
body.getUnits().add(assignStmt);
Util.addLnPosTags(binop.getOp1Box(), assign.left().position());
Util.addLnPosTags(binop.getOp2Box(), assign.right().position());
return retLocal;
}
/**
 * Translates a plain (=) assignment: emits lhs = rhs and returns the value
 * of the whole assignment expression — the lhs local when the target is a
 * local, otherwise the computed rhs.
 */
private soot.Value getSimpleAssignLocal(polyglot.ast.Assign assign){
    soot.Value lhs = createLHS(assign.left());
    soot.Value rhs = getSimpleAssignRightLocal(assign);
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
    body.getUnits().add(assignStmt);
    // position tags for the whole statement and both operand boxes
    Util.addLnPosTags(assignStmt, assign.position());
    Util.addLnPosTags(assignStmt.getRightOpBox(), assign.right().position());
    Util.addLnPosTags(assignStmt.getLeftOpBox(), assign.left().position());
    return (lhs instanceof soot.Local) ? lhs : rhs;
}
/**
 * Translates a String += assignment: emits lhs = (concat of lhs and rhs)
 * and returns the assignment expression's value — the lhs local when the
 * target is a local, otherwise the concatenated rhs local.
 */
private soot.Value getStrConAssignLocal(polyglot.ast.Assign assign){
    soot.Value lhs = createLHS(assign.left());
    soot.Value rhs = getStringConcatAssignRightLocal(assign);
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
    body.getUnits().add(assignStmt);
    // position tags for the whole statement and both operand boxes
    Util.addLnPosTags(assignStmt, assign.position());
    Util.addLnPosTags(assignStmt.getRightOpBox(), assign.right().position());
    Util.addLnPosTags(assignStmt.getLeftOpBox(), assign.left().position());
    return (lhs instanceof soot.Local) ? lhs : rhs;
}
/**
* Assign Expression Creation
*/
/**
 * Assign Expression Creation: top-level dispatch for assignments. Routes
 * private-outer-field writes, plain =, and String += to their helpers;
 * all other compound assignments are expanded here into
 * read-into-local / compute / write-back form.
 */
private soot.Value getAssignLocal(polyglot.ast.Assign assign) {
// handle private access field assigns
HashMap accessMap = ((PolyglotMethodSource)body.getMethod().getSource()).getPrivateAccessMap();
if ((assign.left() instanceof polyglot.ast.Field) && (accessMap != null) && accessMap.containsKey(((polyglot.ast.Field)assign.left()).fieldInstance())){
return handlePrivateFieldSet(assign);
}
if (assign.operator() == polyglot.ast.Assign.ASSIGN){
return getSimpleAssignLocal(assign);
}
if ((assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) && assign.type().toString().equals("java.lang.String")){
return getStrConAssignLocal(assign);
}
soot.jimple.AssignStmt stmt;
soot.Value left = createLHS(assign.left());
// for non-local targets (fields, array refs) read the current value into
// a temporary local first, since the binop needs a local operand
soot.Local leftLocal;
if (left instanceof soot.Local){
leftLocal = (soot.Local)left;
}
else {
leftLocal = lg.generateLocal(left.getType());
stmt = soot.jimple.Jimple.v().newAssignStmt(leftLocal, left);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, assign.position());
}
// compute leftLocal op rhs and store back into leftLocal
soot.Value right = getAssignRightLocal(assign, leftLocal);
stmt = soot.jimple.Jimple.v().newAssignStmt(leftLocal, right);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, assign.position());
Util.addLnPosTags(stmt.getRightOpBox(), assign.right().position());
Util.addLnPosTags(stmt.getLeftOpBox(), assign.left().position());
// write the temporary back to the original (non-local) target
if (!(left instanceof soot.Local)) {
stmt = soot.jimple.Jimple.v().newAssignStmt(left, leftLocal);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, assign.position());
Util.addLnPosTags(stmt.getRightOpBox(), assign.right().position());
Util.addLnPosTags(stmt.getLeftOpBox(), assign.left().position());
}
return leftLocal;
/*if (binop != null){
Util.addLnPosTags(binop.getOp1Box(), assign.left().position());
Util.addLnPosTags(binop.getOp2Box(), assign.right().position());
}*/
/*Util.addLnPosTags(stmt.getRightOpBox(), assign.right().position());
Util.addLnPosTags(stmt.getLeftOpBox(), assign.left().position());
Util.addLnPosTags(stmt, assign.position());
if (!(left instanceof soot.Local)) {
stmt = soot.jimple.Jimple.v().newAssignStmt(left, leftLocal);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, assign.position());
Util.addLnPosTags(stmt.getLeftOpBox(), assign.left().position());
Util.addLnPosTags(stmt.getRightOpBox(), assign.right().position());
}
return leftLocal;*/
}
/**
* Field Expression Creation - LHS
*/
/**
 * Field Expression Creation - LHS: array .length is special-cased (it is
 * not a real field); everything else becomes a field ref usable as an
 * assignment target.
 */
private soot.Value getFieldLocalLeft(polyglot.ast.Field field){
    boolean arrayLength = field.name().equals("length")
            && (field.target().type() instanceof polyglot.types.ArrayType);
    return arrayLength ? getSpecialArrayLengthLocal(field) : getFieldRef(field);
}
/**
* Field Expression Creation
*/
/**
 * Field Expression Creation (rvalue): special-cases array .length,
 * .class literals (error here), private outer-class fields, qualified
 * super field access, and compile-time constants; otherwise reads the
 * field ref into a fresh local.
 */
private soot.Value getFieldLocal(polyglot.ast.Field field){
polyglot.ast.Receiver receiver = field.target();
soot.javaToJimple.PolyglotMethodSource ms = (soot.javaToJimple.PolyglotMethodSource)body.getMethod().getSource();
if ((field.name().equals("length")) && (receiver.type() instanceof polyglot.types.ArrayType)){
return getSpecialArrayLengthLocal(field);
}
else if (field.name().equals("class")){
// .class is parsed as a ClassLit, never as a Field
throw new RuntimeException("Should go through ClassLit");
}
else if ((ms.getPrivateAccessMap() != null) && (ms.getPrivateAccessMap().containsKey(field.fieldInstance()))){
// private field of an outer class: go through the synthetic accessor
return getPrivateAccessFieldLocal(field);
}
if ((field.target() instanceof polyglot.ast.Special) && (((polyglot.ast.Special)field.target()).kind() == polyglot.ast.Special.SUPER) && (((polyglot.ast.Special)field.target()).qualifier() != null)){
return getSpecialSuperQualifierLocal(field);
}
else if (shouldReturnConstant(field)){
return getReturnConstant(field);
// in this case don't return fieldRef but a string constant
}
else {
soot.jimple.FieldRef fieldRef = getFieldRef(field);
soot.Local baseLocal = generateLocal(field.type());
soot.jimple.AssignStmt fieldAssignStmt = soot.jimple.Jimple.v().newAssignStmt(baseLocal, fieldRef);
body.getUnits().add(fieldAssignStmt);
Util.addLnPosTags(fieldAssignStmt, field.position());
return baseLocal;
}
}
/**
 * Folds a compile-time constant field into the matching Jimple constant.
 * Strings map to StringConstant; booleans and chars are encoded as
 * IntConstant (Jimple has no separate boolean/char constants); numeric
 * constants are first widened to the field's declared type.
 *
 * @param field a field for which shouldReturnConstant(field) is true
 * @return the folded Jimple constant
 */
private soot.jimple.Constant getReturnConstant(polyglot.ast.Field field){
    // (removed leftover debug System.out.println calls)
    if (field.fieldInstance().constantValue() instanceof String){
        return soot.jimple.StringConstant.v((String)field.constantValue());
    }
    else if (field.fieldInstance().constantValue() instanceof Boolean){
        boolean val = ((Boolean)field.constantValue()).booleanValue();
        return soot.jimple.IntConstant.v(val ? 1 : 0);
    }
    else if (field.fieldInstance().constantValue() instanceof Character){
        char val = ((Character)field.constantValue()).charValue();
        return soot.jimple.IntConstant.v(val);
    }
    else {//if (field.fieldInstance().constantValue() instanceof Number){
        Number num = (Number)field.fieldInstance().constantValue();
        // widen an int constant to the field's declared type if needed
        num = createConstantCast(field.type(), num);
        if (num instanceof Long) {
            return soot.jimple.LongConstant.v(((Long)num).longValue());
        }
        else if (num instanceof Double) {
            return soot.jimple.DoubleConstant.v(((Double)num).doubleValue());
        }
        else if (num instanceof Float) {
            return soot.jimple.FloatConstant.v(((Float)num).floatValue());
        }
        else {//if (num instanceof Long) {
            return soot.jimple.IntConstant.v(((Integer)num).intValue());
        }
    }
}
/**
 * Widens an Integer constant to the field's declared numeric type
 * (double/float/long); any other constant, or any other target type,
 * is returned unchanged.
 */
private Number createConstantCast(polyglot.types.Type fieldType, Number constant) {
    if (!(constant instanceof Integer)){
        return constant;
    }
    int intVal = ((Integer)constant).intValue();
    if (fieldType.isDouble()){
        return new Double((double)intVal);
    }
    if (fieldType.isFloat()){
        return new Float((float)intVal);
    }
    if (fieldType.isLong()){
        return new Long((long)intVal);
    }
    return constant;
}
/**
 * True when the field is a compile-time constant and should be folded
 * into a Jimple constant instead of a field read.
 */
private boolean shouldReturnConstant(polyglot.ast.Field field){
    return field.fieldInstance().isConstant();
}
/**
* creates a field ref
*/
/**
 * Creates a Jimple field ref for the given polyglot field: a static field
 * ref for static fields, otherwise an instance field ref whose base comes
 * from the field's target expression. Tags the base box with position info
 * when the target is a local.
 */
private soot.jimple.FieldRef getFieldRef(polyglot.ast.Field field) {
    soot.SootClass declaringClass = ((soot.RefType)Util.getSootType(field.fieldInstance().container())).getSootClass();
    soot.SootField sootField = declaringClass.getField(field.name(), Util.getSootType(field.type()));
    soot.jimple.FieldRef ref;
    if (field.fieldInstance().flags().isStatic()) {
        ref = soot.jimple.Jimple.v().newStaticFieldRef(sootField);
    }
    else {
        soot.Local base = (soot.Local)getBaseLocal(field.target());
        ref = soot.jimple.Jimple.v().newInstanceFieldRef(base, sootField);
    }
    if (field.target() instanceof polyglot.ast.Local && ref instanceof soot.jimple.InstanceFieldRef){
        Util.addLnPosTags(((soot.jimple.InstanceFieldRef)ref).getBaseBox(), field.target().position());
    }
    return ref;
}
/**
* For Inner Classes - to access private fields of their outer class
*/
/**
 * For Inner Classes - reads a private field of the outer class by invoking
 * the synthetic static accessor registered in the method source's private
 * access map (instance fields pass the receiver as the sole argument).
 */
private soot.Local getPrivateAccessFieldLocal(polyglot.ast.Field field) {
    soot.javaToJimple.PolyglotMethodSource src = (soot.javaToJimple.PolyglotMethodSource)body.getMethod().getSource();
    soot.SootMethod accessor = (soot.SootMethod)src.getPrivateAccessMap().get(field.fieldInstance());
    ArrayList args = new ArrayList();
    if (!field.fieldInstance().flags().isStatic()) {
        args.add((soot.Local)getBaseLocal(field.target()));
    }
    return Util.getPrivateAccessFieldInvoke(accessor, args, body, lg);
}
/**
* To get the local for the special .class literal
*/
/**
 * To get the local for the special .class literal. Primitive .class reads
 * the wrapper class's TYPE field; reference-type .class generates the
 * classic pre-1.5 lazy caching pattern: if the cached class$X field is
 * null call the synthetic class$(String) method and store the result,
 * else reuse the cached value.
 */
private soot.Local getSpecialClassLitLocal(polyglot.ast.ClassLit lit) {
if (lit.typeNode().type().isPrimitive()){
// e.g. int.class -> java.lang.Integer.TYPE
polyglot.types.PrimitiveType primType = (polyglot.types.PrimitiveType)lit.typeNode().type();
soot.Local retLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
soot.SootField primField = null;
if (primType.isBoolean()){
primField = soot.Scene.v().getSootClass("java.lang.Boolean").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
else if (primType.isByte()){
primField = soot.Scene.v().getSootClass("java.lang.Byte").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
else if (primType.isChar()){
primField = soot.Scene.v().getSootClass("java.lang.Character").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
else if (primType.isDouble()){
primField = soot.Scene.v().getSootClass("java.lang.Double").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
else if (primType.isFloat()){
primField = soot.Scene.v().getSootClass("java.lang.Float").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
else if (primType.isInt()){
primField = soot.Scene.v().getSootClass("java.lang.Integer").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
else if (primType.isLong()){
primField = soot.Scene.v().getSootClass("java.lang.Long").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
else if (primType.isShort()){
primField = soot.Scene.v().getSootClass("java.lang.Short").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
else if (primType.isVoid()){
primField = soot.Scene.v().getSootClass("java.lang.Void").getField("TYPE", soot.RefType.v("java.lang.Class"));
}
soot.jimple.StaticFieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(primField);
soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, fieldRef);
body.getUnits().add(assignStmt);
return retLocal;
}
else {
// this class
soot.SootClass thisClass = body.getMethod().getDeclaringClass();
String fieldName = Util.getFieldNameForClassLit(lit.typeNode().type());
soot.Type fieldType = soot.RefType.v("java.lang.Class");
soot.Local fieldLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
soot.SootField sootField = null;
// interfaces cannot hold the cache field / helper method themselves;
// a special companion anon class carries them instead
if (thisClass.isInterface()){
HashMap specialAnonMap = InitialResolver.v().specialAnonMap();
if ((specialAnonMap != null) && (specialAnonMap.containsKey(thisClass))){
soot.SootClass specialClass = (soot.SootClass)specialAnonMap.get(thisClass);
sootField = specialClass.getField(fieldName, fieldType);
}
else {
throw new RuntimeException("Class is interface so it must have an anon class to handle class lits but its anon class cannot be found.");
}
}
else {
sootField = thisClass.getField(fieldName, fieldType);
}
// read the cache field and branch past the lookup if it is non-null
soot.jimple.StaticFieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(sootField);
soot.jimple.Stmt fieldAssign = soot.jimple.Jimple.v().newAssignStmt(fieldLocal, fieldRef);
body.getUnits().add(fieldAssign);
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Expr neExpr = soot.jimple.Jimple.v().newNeExpr(fieldLocal, soot.jimple.NullConstant.v());
soot.jimple.Stmt ifStmt = soot.jimple.Jimple.v().newIfStmt(neExpr, noop1);
body.getUnits().add(ifStmt);
ArrayList paramTypes = new ArrayList();
paramTypes.add(soot.RefType.v("java.lang.String"));
soot.SootMethod invokeMeth = null;
if (thisClass.isInterface()){
HashMap specialAnonMap = InitialResolver.v().specialAnonMap();
if ((specialAnonMap != null) && (specialAnonMap.containsKey(thisClass))){
soot.SootClass specialClass = (soot.SootClass)specialAnonMap.get(thisClass);
invokeMeth = specialClass.getMethod("class$", paramTypes, soot.RefType.v("java.lang.Class"));
}
else {
throw new RuntimeException("Class is interface so it must have an anon class to handle class lits but its anon class cannot be found.");
}
}
else {
invokeMeth = thisClass.getMethod("class$", paramTypes, soot.RefType.v("java.lang.Class"));
}
// slow path: class$("...") and store into the cache field
ArrayList params = new ArrayList();
params.add(soot.jimple.StringConstant.v(Util.getParamNameForClassLit(lit.typeNode().type())));
soot.jimple.Expr classInvoke = soot.jimple.Jimple.v().newStaticInvokeExpr(invokeMeth, params);
soot.Local methLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
soot.jimple.Stmt invokeAssign = soot.jimple.Jimple.v().newAssignStmt(methLocal, classInvoke);
body.getUnits().add(invokeAssign);
soot.jimple.Stmt assignField = soot.jimple.Jimple.v().newAssignStmt(fieldRef, methLocal);
body.getUnits().add(assignField);
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(goto1);
// fast path: cache hit, reuse the stored Class value
body.getUnits().add(noop1);
fieldAssign = soot.jimple.Jimple.v().newAssignStmt(methLocal, fieldRef);
body.getUnits().add(fieldAssign);
body.getUnits().add(noop2);
return methLocal;
}
}
/**
* Array Length local for example a.length w/o brackets gets length
* of array
*/
/**
 * Array Length local: translates a.length (no brackets) into a Jimple
 * lengthof expression assigned into a fresh int local.
 */
private soot.Local getSpecialArrayLengthLocal(polyglot.ast.Field field) {
    polyglot.ast.Receiver target = field.target();
    soot.Local arrayLocal;
    if (target instanceof polyglot.ast.Local) {
        arrayLocal = getLocal((polyglot.ast.Local)target);
    }
    else if (target instanceof polyglot.ast.Expr){
        arrayLocal = (soot.Local)createExpr((polyglot.ast.Expr)target);
    }
    else {
        arrayLocal = generateLocal(target.type());
    }
    soot.jimple.LengthExpr lengthOf = soot.jimple.Jimple.v().newLengthExpr(arrayLocal);
    soot.Local result = lg.generateLocal(soot.IntType.v());
    soot.jimple.Stmt lengthAssign = soot.jimple.Jimple.v().newAssignStmt(result, lengthOf);
    body.getUnits().add(lengthAssign);
    Util.addLnPosTags(lengthAssign, field.position());
    Util.addLnPosTags(lengthOf.getOpBox(), field.target().position());
    return result;
}
/**
* Binary Expression Creation
*/
/**
 * Binary Expression Creation: short-circuit &&/|| get their own control
 * flow; String + is either constant-folded or expanded to string buffer
 * appends; everything else becomes a Jimple binop, assigned into a local
 * unless it is a ConditionExpr (returned raw for the caller to branch on).
 */
private soot.Value getBinaryLocal(polyglot.ast.Binary binary) {
soot.Value rhs;
if (binary.operator() == polyglot.ast.Binary.COND_AND) {
return createCondAnd(binary);
}
if (binary.operator() == polyglot.ast.Binary.COND_OR) {
return createCondOr(binary);
}
if (binary.type().toString().equals("java.lang.String")){
if (areAllStringLits(binary)){
// all operands are compile-time constants: fold to one constant
String result = createStringConstant(binary);
return soot.jimple.StringConstant.v(result);
}
else {
// runtime concat: buffer-append both sides, then toString
soot.Local sb = (soot.Local)createStringBuffer(binary);
generateAppends(binary.left(), sb);
generateAppends(binary.right(), sb);
return createToString(sb, binary);
}
}
soot.Value lVal = createExpr(binary.left());
soot.Value rVal = createExpr(binary.right());
if (isComparisonBinary(binary.operator())) {
rhs = getBinaryComparisonExpr(lVal, rVal, binary.operator());
}
else {
rhs = getBinaryExpr(lVal, rVal, binary.operator());
}
if (rhs instanceof soot.jimple.BinopExpr) {
Util.addLnPosTags(((soot.jimple.BinopExpr)rhs).getOp1Box(), binary.left().position());
Util.addLnPosTags(((soot.jimple.BinopExpr)rhs).getOp2Box(), binary.right().position());
}
// condition expressions are not assigned; the caller branches on them
if (rhs instanceof soot.jimple.ConditionExpr) {
return rhs;
}
soot.Local lhs = generateLocal(binary.type());
soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
body.getUnits().add(assignStmt);
Util.addLnPosTags(assignStmt.getRightOpBox(), binary.position());
return lhs;
}
/**
 * True when the node is made up entirely of compile-time String constants
 * (string literals, constant fields, or binaries over such operands), so
 * the concatenation can be folded at compile time.
 */
private boolean areAllStringLits(polyglot.ast.Node node){
    if (node instanceof polyglot.ast.StringLit) {
        return true;
    }
    if (node instanceof polyglot.ast.Field) {
        return shouldReturnConstant((polyglot.ast.Field)node);
    }
    if (node instanceof polyglot.ast.Binary) {
        return areAllStringLitsBinary((polyglot.ast.Binary)node);
    }
    return false;
}
/** True when both operands of the binary are compile-time String constants. */
private boolean areAllStringLitsBinary(polyglot.ast.Binary binary){
    return areAllStringLits(binary.left()) && areAllStringLits(binary.right());
}
/**
 * Folds a node known to consist of string literals / constant fields /
 * binaries over such into its constant String value.
 *
 * @throws RuntimeException for any other node kind.
 */
private String createStringConstant(polyglot.ast.Node node){
    if (node instanceof polyglot.ast.StringLit) {
        return ((polyglot.ast.StringLit) node).value();
    }
    if (node instanceof polyglot.ast.Field) {
        return ((polyglot.ast.Field) node).fieldInstance().constantValue().toString();
    }
    if (node instanceof polyglot.ast.Binary) {
        return createStringConstantBinary((polyglot.ast.Binary) node);
    }
    throw new RuntimeException("No other string constant folding done");
}
/** Folds both operands and concatenates them, left then right. */
private String createStringConstantBinary(polyglot.ast.Binary binary){
    return createStringConstant(binary.left()) + createStringConstant(binary.right());
}
/** True for the six relational operators (==, !=, >=, >, <=, <). */
private boolean isComparisonBinary(polyglot.ast.Binary.Operator op) {
    return op == polyglot.ast.Binary.EQ
        || op == polyglot.ast.Binary.NE
        || op == polyglot.ast.Binary.GE
        || op == polyglot.ast.Binary.GT
        || op == polyglot.ast.Binary.LE
        || op == polyglot.ast.Binary.LT;
}
/**
 * Creates a binary expression that is not a comparison.
 *
 * Maps arithmetic/bitwise/shift polyglot operators onto the corresponding
 * Jimple expression. Operands that arrive as ConditionExprs are first
 * materialized into boolean locals. For shift operators a shift amount of
 * type long is narrowed to int via an explicit cast (see
 * {@link #narrowShiftAmountToInt}); previously this cast logic was
 * duplicated verbatim for SHR/USHR/SHL.
 *
 * @throws RuntimeException for an unhandled operator.
 */
private soot.Value getBinaryExpr(soot.Value lVal, soot.Value rVal, polyglot.ast.Binary.Operator operator){
    soot.Value rValue = null;
    if (lVal instanceof soot.jimple.ConditionExpr) {
        lVal = handleCondBinExpr((soot.jimple.ConditionExpr)lVal);
    }
    if (rVal instanceof soot.jimple.ConditionExpr) {
        rVal = handleCondBinExpr((soot.jimple.ConditionExpr)rVal);
    }
    if (operator == polyglot.ast.Binary.ADD){
        rValue = soot.jimple.Jimple.v().newAddExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.SUB){
        rValue = soot.jimple.Jimple.v().newSubExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.MUL){
        rValue = soot.jimple.Jimple.v().newMulExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.DIV){
        rValue = soot.jimple.Jimple.v().newDivExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.SHR){
        rValue = soot.jimple.Jimple.v().newShrExpr(lVal, narrowShiftAmountToInt(rVal));
    }
    else if (operator == polyglot.ast.Binary.USHR){
        rValue = soot.jimple.Jimple.v().newUshrExpr(lVal, narrowShiftAmountToInt(rVal));
    }
    else if (operator == polyglot.ast.Binary.SHL){
        rValue = soot.jimple.Jimple.v().newShlExpr(lVal, narrowShiftAmountToInt(rVal));
    }
    else if (operator == polyglot.ast.Binary.BIT_AND){
        rValue = soot.jimple.Jimple.v().newAndExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.BIT_OR){
        rValue = soot.jimple.Jimple.v().newOrExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.BIT_XOR){
        rValue = soot.jimple.Jimple.v().newXorExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.MOD){
        rValue = soot.jimple.Jimple.v().newRemExpr(lVal, rVal);
    }
    else {
        throw new RuntimeException("Binary not yet handled!");
    }
    return rValue;
}

/**
 * If the given shift amount has type long, emits a cast narrowing it to
 * int and returns the resulting fresh local; otherwise returns the value
 * unchanged.
 */
private soot.Value narrowShiftAmountToInt(soot.Value shiftAmount) {
    if (!shiftAmount.getType().equals(soot.LongType.v())) {
        return shiftAmount;
    }
    soot.Local intVal = lg.generateLocal(soot.IntType.v());
    soot.jimple.CastExpr castExpr = soot.jimple.Jimple.v().newCastExpr(shiftAmount, soot.IntType.v());
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(intVal, castExpr);
    body.getUnits().add(assignStmt);
    return intVal;
}
/**
 * Creates a binary expr that is a comparison.
 *
 * Maps the six relational polyglot operators to the matching Jimple
 * condition expression.
 *
 * @throws RuntimeException for any non-comparison operator.
 */
private soot.Value getBinaryComparisonExpr(soot.Value lVal, soot.Value rVal, polyglot.ast.Binary.Operator operator) {
    soot.jimple.Jimple factory = soot.jimple.Jimple.v();
    if (operator == polyglot.ast.Binary.EQ) {
        return factory.newEqExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.GE) {
        return factory.newGeExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.GT) {
        return factory.newGtExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.LE) {
        return factory.newLeExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.LT) {
        return factory.newLtExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.NE) {
        return factory.newNeExpr(lVal, rVal);
    }
    throw new RuntimeException("Unknown Comparison Expr");
}
/**
 * In bytecode and Jimple the conditions in conditional binary expressions
 * are often reversed; this returns the logical negation of the given
 * condition (== becomes !=, > becomes <=, and so on), carrying the operand
 * boxes' tags over to the new expression.
 *
 * @throws RuntimeException for an unknown condition kind.
 */
private soot.Value reverseCondition(soot.jimple.ConditionExpr cond) {
    soot.jimple.Jimple factory = soot.jimple.Jimple.v();
    soot.Value op1 = cond.getOp1();
    soot.Value op2 = cond.getOp2();
    soot.jimple.ConditionExpr flipped;
    if (cond instanceof soot.jimple.EqExpr) {
        flipped = factory.newNeExpr(op1, op2);
    } else if (cond instanceof soot.jimple.NeExpr) {
        flipped = factory.newEqExpr(op1, op2);
    } else if (cond instanceof soot.jimple.GtExpr) {
        flipped = factory.newLeExpr(op1, op2);
    } else if (cond instanceof soot.jimple.GeExpr) {
        flipped = factory.newLtExpr(op1, op2);
    } else if (cond instanceof soot.jimple.LtExpr) {
        flipped = factory.newGeExpr(op1, op2);
    } else if (cond instanceof soot.jimple.LeExpr) {
        flipped = factory.newGtExpr(op1, op2);
    } else {
        throw new RuntimeException("Unknown Condition Expr");
    }
    // Preserve source-position tags on both operand boxes.
    flipped.getOp1Box().addAllTagsOf(cond.getOp1Box());
    flipped.getOp2Box().addAllTagsOf(cond.getOp2Box());
    return flipped;
}
/**
 * Special conditions for doubles and floats and longs.
 *
 * Jimple branch conditions compare int-like operands, so for double/float/
 * long operands a cmp/cmpg/cmpl result is first computed into a byte local
 * and the original comparison is rebuilt against the constant 0.
 * Int-like conditions are returned unchanged; otherwise the emitted
 * comparison statement is appended to {@code body}.
 */
private soot.Value handleDFLCond(soot.jimple.ConditionExpr cond){
soot.Local result = lg.generateLocal(soot.ByteType.v());
soot.jimple.Expr cmExpr = null;
if (isDouble(cond.getOp1()) || isDouble(cond.getOp2()) || isFloat(cond.getOp1()) || isFloat(cond.getOp2())) {
// use cmpg and cmpl
if ((cond instanceof soot.jimple.GeExpr) || (cond instanceof soot.jimple.GtExpr)) {
// use cmpg
cmExpr = soot.jimple.Jimple.v().newCmpgExpr(cond.getOp1(), cond.getOp2());
}
else {
// use cmpl
cmExpr = soot.jimple.Jimple.v().newCmplExpr(cond.getOp1(), cond.getOp2());
}
}
else if (isLong(cond.getOp1()) || isLong(cond.getOp2())) {
// use cmp
cmExpr = soot.jimple.Jimple.v().newCmpExpr(cond.getOp1(), cond.getOp2());
}
else {
// int-like operands: usable as a branch condition directly
return cond;
}
soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(result, cmExpr);
body.getUnits().add(assign);
// Rebuild the condition as (cmp-result OP 0), preserving the operator kind.
if (cond instanceof soot.jimple.EqExpr){
cond = soot.jimple.Jimple.v().newEqExpr(result, soot.jimple.IntConstant.v(0));
}
else if (cond instanceof soot.jimple.GeExpr){
cond = soot.jimple.Jimple.v().newGeExpr(result, soot.jimple.IntConstant.v(0));
}
else if (cond instanceof soot.jimple.GtExpr){
cond = soot.jimple.Jimple.v().newGtExpr(result, soot.jimple.IntConstant.v(0));
}
else if (cond instanceof soot.jimple.LeExpr){
cond = soot.jimple.Jimple.v().newLeExpr(result, soot.jimple.IntConstant.v(0));
}
else if (cond instanceof soot.jimple.LtExpr){
cond = soot.jimple.Jimple.v().newLtExpr(result, soot.jimple.IntConstant.v(0));
}
else if (cond instanceof soot.jimple.NeExpr){
cond = soot.jimple.Jimple.v().newNeExpr(result, soot.jimple.IntConstant.v(0));
}
else {
throw new RuntimeException("Unknown Comparison Expr");
}
return cond;
}
/** True iff the value's static type is double. */
private boolean isDouble(soot.Value val) {
    return val.getType() instanceof soot.DoubleType;
}
/** True iff the value's static type is float. */
private boolean isFloat(soot.Value val) {
    return val.getType() instanceof soot.FloatType;
}
/** True iff the value's static type is long. */
private boolean isLong(soot.Value val) {
    return val.getType() instanceof soot.LongType;
}
/**
 * Creates a conditional AND expr.
 *
 * Emits the short-circuit branch skeleton for {@code left && right}:
 * each side that needs a branch jumps to noop1 (the "false" label) when
 * its (reversed) condition holds; falling through both tests assigns 1,
 * otherwise 0. Returns the boolean result local.
 */
private soot.Local createCondAnd(polyglot.ast.Binary binary) {
soot.Local retLocal = lg.generateLocal(soot.BooleanType.v());
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
soot.Value lVal = createExpr(binary.left());
boolean leftNeedIf = needSootIf(lVal);
if (!(lVal instanceof soot.jimple.ConditionExpr)) {
// Non-condition value: branch to "false" when it equals 0.
lVal = soot.jimple.Jimple.v().newEqExpr(lVal, soot.jimple.IntConstant.v(0));
}
else {
// Condition: reverse it (branch on failure), then fix up D/F/L operands.
lVal = reverseCondition((soot.jimple.ConditionExpr)lVal);
lVal = handleDFLCond((soot.jimple.ConditionExpr)lVal);
}
if (leftNeedIf){
soot.jimple.IfStmt ifLeft = soot.jimple.Jimple.v().newIfStmt(lVal, noop1);
body.getUnits().add(ifLeft);
Util.addLnPosTags(ifLeft.getConditionBox(), binary.left().position());
}
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
soot.Value rVal = createExpr(binary.right());
boolean rightNeedIf = needSootIf(rVal);
if (!(rVal instanceof soot.jimple.ConditionExpr)) {
rVal = soot.jimple.Jimple.v().newEqExpr(rVal, soot.jimple.IntConstant.v(0));
}
else {
rVal = reverseCondition((soot.jimple.ConditionExpr)rVal);
rVal = handleDFLCond((soot.jimple.ConditionExpr)rVal);
}
if (rightNeedIf){
soot.jimple.IfStmt ifRight = soot.jimple.Jimple.v().newIfStmt(rVal, noop1);
body.getUnits().add(ifRight);
Util.addLnPosTags(ifRight.getConditionBox(), binary.right().position());
}
// Fall-through: both operands held, result is true.
soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(1));
body.getUnits().add(assign1);
soot.jimple.Stmt gotoEnd1 = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(gotoEnd1);
// "false" label: some operand failed.
body.getUnits().add(noop1);
soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(0));
body.getUnits().add(assign2);
body.getUnits().add(endNoop);
return retLocal;
}
/**
 * Creates a conditional OR expr.
 *
 * Emits the short-circuit branch skeleton for {@code left || right}:
 * each side that needs a branch jumps to noop1 (the "true" label) when
 * its condition holds; falling through both tests assigns 0, the label
 * assigns 1. Note: unlike createCondAnd, conditions are NOT reversed here
 * because the branch target is the success case. Returns the boolean
 * result local.
 */
private soot.Local createCondOr(polyglot.ast.Binary binary) {
soot.Local retLocal = lg.generateLocal(soot.BooleanType.v());
//end
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
soot.Value lVal = createExpr(binary.left());
boolean leftNeedIf = needSootIf(lVal);
if (!(lVal instanceof soot.jimple.ConditionExpr)) {
// Non-condition value: branch to "true" when it equals 1.
lVal = soot.jimple.Jimple.v().newEqExpr(lVal, soot.jimple.IntConstant.v(1));
}
else {
lVal = handleDFLCond((soot.jimple.ConditionExpr)lVal);
}
if (leftNeedIf){
soot.jimple.IfStmt ifLeft = soot.jimple.Jimple.v().newIfStmt(lVal, noop1);
body.getUnits().add(ifLeft);
Util.addLnPosTags(ifLeft, binary.left().position());
Util.addLnPosTags(ifLeft.getConditionBox(), binary.left().position());
}
soot.Value rVal = createExpr(binary.right());
boolean rightNeedIf = needSootIf(rVal);
if (!(rVal instanceof soot.jimple.ConditionExpr)) {
rVal = soot.jimple.Jimple.v().newEqExpr(rVal, soot.jimple.IntConstant.v(1));
}
else {
rVal = handleDFLCond((soot.jimple.ConditionExpr)rVal);
}
if (rightNeedIf){
soot.jimple.IfStmt ifRight = soot.jimple.Jimple.v().newIfStmt(rVal, noop1);
body.getUnits().add(ifRight);
Util.addLnPosTags(ifRight, binary.right().position());
Util.addLnPosTags(ifRight.getConditionBox(), binary.right().position());
}
// Fall-through: neither operand held, result is false.
soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(0));
body.getUnits().add(assign2);
Util.addLnPosTags(assign2, binary.position());
soot.jimple.Stmt gotoEnd2 = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(gotoEnd2);
// "true" label: some operand held.
body.getUnits().add(noop1);
soot.jimple.Stmt assign3 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(1));
body.getUnits().add(assign3);
Util.addLnPosTags(assign3, binary.position());
body.getUnits().add(endNoop);
return retLocal;
}
/**
 * Materializes a condition expression into a boolean local:
 * emits "if (cond) goto T; local = 0; goto E; T: local = 1; E:" and
 * returns the local. Double/float/long conditions are first rewritten
 * via handleDFLCond.
 */
private soot.Local handleCondBinExpr(soot.jimple.ConditionExpr condExpr) {
    soot.jimple.Jimple factory = soot.jimple.Jimple.v();
    soot.Local boolLocal = lg.generateLocal(soot.BooleanType.v());
    soot.jimple.Stmt trueLabel = factory.newNopStmt();
    soot.Value branchCond = handleDFLCond(condExpr);
    body.getUnits().add(factory.newIfStmt(branchCond, trueLabel));
    body.getUnits().add(factory.newAssignStmt(boolLocal, soot.jimple.IntConstant.v(0)));
    soot.jimple.Stmt endLabel = factory.newNopStmt();
    body.getUnits().add(factory.newGotoStmt(endLabel));
    body.getUnits().add(trueLabel);
    body.getUnits().add(factory.newAssignStmt(boolLocal, soot.jimple.IntConstant.v(1)));
    body.getUnits().add(endLabel);
    return boolLocal;
}
/**
 * Emits "local = new StringBuffer(); specialinvoke local.<init>()" and
 * returns the local, tagging both statements with the expression's
 * source position.
 */
private soot.Local createStringBuffer(polyglot.ast.Expr expr){
    soot.jimple.Jimple factory = soot.jimple.Jimple.v();
    soot.RefType bufferType = soot.RefType.v("java.lang.StringBuffer");
    soot.Local bufferLocal = lg.generateLocal(bufferType);
    soot.jimple.NewExpr allocation = factory.newNewExpr(bufferType);
    soot.jimple.Stmt allocAssign = factory.newAssignStmt(bufferLocal, allocation);
    body.getUnits().add(allocAssign);
    Util.addLnPosTags(allocAssign, expr.position());
    // Explicit no-arg constructor call on the freshly allocated buffer.
    soot.SootClass bufferClass = soot.Scene.v().getSootClass("java.lang.StringBuffer");
    soot.SootMethod ctor = getMethodFromClass(bufferClass, "<init>", new ArrayList(), soot.VoidType.v());
    soot.jimple.SpecialInvokeExpr ctorInvoke = factory.newSpecialInvokeExpr(bufferLocal, ctor);
    soot.jimple.Stmt ctorStmt = factory.newInvokeStmt(ctorInvoke);
    body.getUnits().add(ctorStmt);
    Util.addLnPosTags(ctorStmt, expr.position());
    return bufferLocal;
}
/**
 * Emits "newString = virtualinvoke sb.toString()" for a StringBuffer
 * local and returns the String-typed result local.
 */
private soot.Local createToString(soot.Local sb, polyglot.ast.Expr expr){
    soot.jimple.Jimple factory = soot.jimple.Jimple.v();
    soot.Local stringLocal = lg.generateLocal(soot.RefType.v("java.lang.String"));
    soot.SootClass bufferClass = soot.Scene.v().getSootClass("java.lang.StringBuffer");
    soot.SootMethod toStringMethod = getMethodFromClass(bufferClass, "toString", new ArrayList(), soot.RefType.v("java.lang.String"));
    soot.jimple.VirtualInvokeExpr invoke = factory.newVirtualInvokeExpr(sb, toStringMethod);
    soot.jimple.Stmt resultAssign = factory.newAssignStmt(stringLocal, invoke);
    body.getUnits().add(resultAssign);
    Util.addLnPosTags(resultAssign, expr.position());
    return stringLocal;
}
/**
 * True when the expression is a String concatenation: either a binary
 * '+' of String type, or a '+=' assignment of String type.
 */
private boolean isStringConcat(polyglot.ast.Expr expr){
    if (expr instanceof polyglot.ast.Binary) {
        polyglot.ast.Binary bin = (polyglot.ast.Binary) expr;
        return bin.operator() == polyglot.ast.Binary.ADD
            && bin.type().toString().equals("java.lang.String");
    }
    if (expr instanceof polyglot.ast.Assign) {
        polyglot.ast.Assign assign = (polyglot.ast.Assign) expr;
        return assign.operator() == polyglot.ast.Assign.ADD_ASSIGN
            && assign.type().toString().equals("java.lang.String");
    }
    return false;
}
/**
 * Generates one part of a concatenation String.
 *
 * Nested concatenations are flattened recursively; a leaf expression is
 * evaluated and appended to the StringBuffer local {@code sb} via the
 * overload of append() matching the computed appendType.
 */
private void generateAppends(polyglot.ast.Expr expr, soot.Local sb) {
if (isStringConcat(expr)){
// Still a concat node: recurse into both sides instead of appending.
if (expr instanceof polyglot.ast.Binary){
generateAppends(((polyglot.ast.Binary)expr).left(), sb);
generateAppends(((polyglot.ast.Binary)expr).right(), sb);
}
else {
generateAppends(((polyglot.ast.Assign)expr).left(), sb);
generateAppends(((polyglot.ast.Assign)expr).right(), sb);
}
}
else {
soot.Value toApp = createExpr(expr);
// appendType selects which StringBuffer.append overload to invoke.
soot.Type appendType = null;
if (toApp instanceof soot.jimple.StringConstant) {
appendType = soot.RefType.v("java.lang.String");
}
else if (toApp instanceof soot.jimple.Constant) {
appendType = toApp.getType();
}
else if (toApp instanceof soot.Local) {
if (((soot.Local)toApp).getType() instanceof soot.PrimType) {
appendType = ((soot.Local)toApp).getType();
}
else if (((soot.Local)toApp).getType() instanceof soot.RefType) {
if (((soot.Local)toApp).getType().toString().equals("java.lang.String")){
appendType = soot.RefType.v("java.lang.String");
}
else if (((soot.Local)toApp).getType().toString().equals("java.lang.StringBuffer")){
appendType = soot.RefType.v("java.lang.StringBuffer");
}
else{
appendType = soot.RefType.v("java.lang.Object");
}
}
else {
// this is for arrays
appendType = soot.RefType.v("java.lang.Object");
}
}
else if (toApp instanceof soot.jimple.ConditionExpr) {
// Conditions must be materialized into a boolean local first.
toApp = handleCondBinExpr((soot.jimple.ConditionExpr)toApp);
appendType = soot.BooleanType.v();
}
// NOTE(review): if toApp is none of the cases above, appendType stays
// null here and the append lookup below would receive a null param
// type — presumably unreachable for well-typed input; verify.
// handle shorts
if (appendType instanceof soot.ShortType || appendType instanceof soot.ByteType) {
// StringBuffer has no short/byte append overload: widen to int first.
soot.Local intLocal = lg.generateLocal(soot.IntType.v());
soot.jimple.Expr cast = soot.jimple.Jimple.v().newCastExpr(toApp, soot.IntType.v());
soot.jimple.Stmt castAssign = soot.jimple.Jimple.v().newAssignStmt(intLocal, cast);
body.getUnits().add(castAssign);
toApp = intLocal;
appendType = soot.IntType.v();
}
ArrayList paramsTypes = new ArrayList();
paramsTypes.add(appendType);
ArrayList params = new ArrayList();
params.add(toApp);
soot.SootClass classToInvoke = soot.Scene.v().getSootClass("java.lang.StringBuffer");
soot.SootMethod methodToInvoke = getMethodFromClass(classToInvoke, "append", paramsTypes, soot.RefType.v("java.lang.StringBuffer"));
soot.jimple.VirtualInvokeExpr appendInvoke = soot.jimple.Jimple.v().newVirtualInvokeExpr(sb, methodToInvoke, params);
soot.jimple.Stmt appendStmt = soot.jimple.Jimple.v().newInvokeStmt(appendInvoke);
body.getUnits().add(appendStmt);
Util.addLnPosTags(appendStmt, expr.position());
}
}
/**
 * Unary Expression Creation.
 *
 * Translates a polyglot unary expression (++/-- pre and post, ~, -, +, !)
 * into Jimple, appending intermediate statements to {@code body}, and
 * returns the local holding the result. For field/array targets, the
 * updated value is additionally written back through createLHS.
 */
private soot.Local getUnaryLocal(polyglot.ast.Unary unary) {
polyglot.ast.Expr expr = unary.expr();
polyglot.ast.Unary.Operator op = unary.operator();
if (op == polyglot.ast.Unary.POST_INC){
// Post-increment: return the OLD value, then store value+1 back.
soot.Local retLocal = generateLocal(expr.type());
soot.Value sootExpr = createExpr(expr);
soot.jimple.AssignStmt preStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
body.getUnits().add(preStmt);
soot.jimple.AddExpr addExpr = soot.jimple.Jimple.v().newAddExpr(sootExpr, getConstant(retLocal.getType(), 1));
Util.addLnPosTags(addExpr.getOp1Box(), expr.position());
soot.Local local = generateLocal(expr.type());
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(local, addExpr);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, expr.position());
soot.jimple.AssignStmt aStmt = soot.jimple.Jimple.v().newAssignStmt(sootExpr, local);
body.getUnits().add(aStmt);
Util.addLnPosTags(aStmt, expr.position());
Util.addLnPosTags(aStmt, unary.position());
if ((expr instanceof polyglot.ast.Field) || (expr instanceof polyglot.ast.ArrayAccess)) {
// Write the incremented value back to the actual field/array slot.
soot.Value actualUnaryExpr = createLHS(expr);
soot.jimple.AssignStmt s = soot.jimple.Jimple.v().newAssignStmt(actualUnaryExpr, local);
body.getUnits().add(s);
Util.addLnPosTags(s, expr.position());
Util.addLnPosTags(s.getLeftOpBox(), expr.position());
}
return retLocal;
}
else if (op == polyglot.ast.Unary.POST_DEC) {
// Post-decrement: mirror of POST_INC using SubExpr.
soot.Local retLocal = generateLocal(expr.type());
soot.Value sootExpr = createExpr(expr);
soot.jimple.AssignStmt preStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
body.getUnits().add(preStmt);
soot.jimple.SubExpr subExpr = soot.jimple.Jimple.v().newSubExpr(sootExpr, getConstant(retLocal.getType(), 1));
Util.addLnPosTags(subExpr.getOp1Box(), expr.position());
soot.Local local = generateLocal(expr.type());
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(local, subExpr);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, expr.position());
soot.jimple.AssignStmt aStmt = soot.jimple.Jimple.v().newAssignStmt(sootExpr, local);
body.getUnits().add(aStmt);
Util.addLnPosTags(aStmt, expr.position());
Util.addLnPosTags(aStmt, unary.position());
if ((expr instanceof polyglot.ast.Field) || (expr instanceof polyglot.ast.ArrayAccess)) {
soot.Value actualUnaryExpr = createLHS(expr);
soot.jimple.AssignStmt s = soot.jimple.Jimple.v().newAssignStmt(actualUnaryExpr, local);
body.getUnits().add(s);
Util.addLnPosTags(s, expr.position());
Util.addLnPosTags(s.getLeftOpBox(), expr.position());
}
return retLocal;
}
else if (op == polyglot.ast.Unary.PRE_INC) {
// Pre-increment: return the NEW value.
soot.Value sootExpr = createExpr(expr);
soot.jimple.AddExpr addExpr = soot.jimple.Jimple.v().newAddExpr(sootExpr, getConstant(sootExpr.getType(), 1));
Util.addLnPosTags(addExpr.getOp1Box(), expr.position());
soot.Local local = generateLocal(expr.type());
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(local, addExpr);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, expr.position());
if ((expr instanceof polyglot.ast.Field) || (expr instanceof polyglot.ast.ArrayAccess) || (expr instanceof polyglot.ast.Local)) {
soot.Value actualUnaryExpr = createLHS(expr);
body.getUnits().add(soot.jimple.Jimple.v().newAssignStmt(actualUnaryExpr, local));
}
return local;
}
else if (op == polyglot.ast.Unary.PRE_DEC) {
// Pre-decrement: mirror of PRE_INC using SubExpr.
soot.Value sootExpr = createExpr(expr);
soot.jimple.SubExpr subExpr = soot.jimple.Jimple.v().newSubExpr(sootExpr, getConstant(sootExpr.getType(), 1));
Util.addLnPosTags(subExpr.getOp1Box(), expr.position());
soot.Local local = generateLocal(expr.type());
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(local, subExpr);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, expr.position());
if ((expr instanceof polyglot.ast.Field) || (expr instanceof polyglot.ast.ArrayAccess) || (expr instanceof polyglot.ast.Local)) {
soot.Value actualUnaryExpr = createLHS(expr);
body.getUnits().add(soot.jimple.Jimple.v().newAssignStmt(actualUnaryExpr, local));
}
return local;
}
else if (op == polyglot.ast.Unary.BIT_NOT) {
// Bitwise complement: emitted as (expr XOR -1).
// NOTE(review): int1 below is assigned but never used — dead local;
// the XOR uses getConstant(..., -1) instead.
soot.jimple.IntConstant int1 = soot.jimple.IntConstant.v(-1);
soot.Local retLocal = generateLocal(expr.type());
soot.Value sootExpr = createExpr(expr);
soot.jimple.XorExpr xor = soot.jimple.Jimple.v().newXorExpr(sootExpr, getConstant(sootExpr.getType(), -1));
Util.addLnPosTags(xor.getOp1Box(), expr.position());
soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, xor);
body.getUnits().add(assign1);
Util.addLnPosTags(assign1, unary.position());
return retLocal;
}
else if (op == polyglot.ast.Unary.NEG) {
soot.Value sootExpr;
// Fold negation of numeric literals directly into a negated constant.
if (expr instanceof polyglot.ast.NumLit) {
int intVal = (int)((polyglot.ast.NumLit)expr).longValue();
sootExpr = soot.jimple.IntConstant.v(-intVal);
}
else if (expr instanceof polyglot.ast.FloatLit){
double doubleVal = ((polyglot.ast.FloatLit)expr).value();
if (((polyglot.ast.FloatLit)expr).kind() == polyglot.ast.FloatLit.DOUBLE){
sootExpr = soot.jimple.DoubleConstant.v(-doubleVal);
}
else {
sootExpr = soot.jimple.FloatConstant.v(-(float)doubleVal);
}
}
else {
soot.Value local = createExpr(expr);
soot.jimple.NegExpr negExpr = soot.jimple.Jimple.v().newNegExpr(local);
sootExpr = negExpr;
Util.addLnPosTags(negExpr.getOpBox(), expr.position());
}
soot.Local retLocal = generateLocal(expr.type());
soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
body.getUnits().add(assign);
Util.addLnPosTags(assign, expr.position());
return retLocal;
}
else if (op == polyglot.ast.Unary.POS) {
// Unary plus: just copy the value into a fresh local.
soot.Local retLocal = generateLocal(expr.type());
soot.Value sootExpr = createExpr(expr);
soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
body.getUnits().add(assign);
Util.addLnPosTags(assign, expr.position());
return retLocal;
}
else if (op == polyglot.ast.Unary.NOT) {
// Logical not: branch on (local != 0); taken -> 0, fall-through -> 1.
soot.Value local = createExpr(expr);
if (local instanceof soot.jimple.ConditionExpr){
local = handleCondBinExpr((soot.jimple.ConditionExpr)local);
}
soot.jimple.NeExpr neExpr = soot.jimple.Jimple.v().newNeExpr(local, getConstant(local.getType(), 0));
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt ifStmt = soot.jimple.Jimple.v().newIfStmt(neExpr, noop1);
body.getUnits().add(ifStmt);
Util.addLnPosTags(ifStmt, expr.position());
soot.Local retLocal = lg.generateLocal(local.getType());
soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, getConstant(retLocal.getType(), 1));
body.getUnits().add(assign1);
Util.addLnPosTags(assign1, expr.position());
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(goto1);
body.getUnits().add(noop1);
soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, getConstant(retLocal.getType(), 0));
body.getUnits().add(assign2);
Util.addLnPosTags(assign2, expr.position());
body.getUnits().add(noop2);
return retLocal;
}
else {
throw new RuntimeException("Unhandled Unary Expr");
}
}
/**
 * Returns a needed constant given a type and val: the numeric constant of
 * the matching Jimple kind (double, float, long, or int for everything
 * else including the smaller integral types).
 */
private soot.jimple.Constant getConstant(soot.Type type, int val) {
    if (type instanceof soot.DoubleType) {
        return soot.jimple.DoubleConstant.v(val);
    }
    if (type instanceof soot.FloatType) {
        return soot.jimple.FloatConstant.v(val);
    }
    if (type instanceof soot.LongType) {
        return soot.jimple.LongConstant.v(val);
    }
    return soot.jimple.IntConstant.v(val);
}
/**
 * Cast Expression Creation.
 *
 * If the operand already has the target type, simply evaluates it;
 * otherwise emits "retLocal = (T) operand" and returns the fresh local.
 */
private soot.Value getCastLocal(polyglot.ast.Cast castExpr){
    // No-op cast: skip the Jimple cast entirely.
    if (castExpr.expr().type().equals(castExpr.type())) {
        return createExpr(castExpr.expr());
    }
    soot.Value operand = createExpr(castExpr.expr());
    soot.Type targetType = Util.getSootType(castExpr.type());
    soot.jimple.CastExpr cast = soot.jimple.Jimple.v().newCastExpr(operand, targetType);
    Util.addLnPosTags(cast.getOpBox(), castExpr.expr().position());
    soot.Local castLocal = lg.generateLocal(cast.getCastType());
    soot.jimple.Stmt castAssign = soot.jimple.Jimple.v().newAssignStmt(castLocal, cast);
    body.getUnits().add(castAssign);
    Util.addLnPosTags(castAssign, castExpr.position());
    return castLocal;
}
/**
 * Procedure Call Helper Methods.
 * Evaluates each call argument and returns the list of resulting values;
 * condition-expression arguments are materialized into boolean locals.
 */
private ArrayList getSootParams(polyglot.ast.ProcedureCall call) {
    ArrayList sootParams = new ArrayList();
    for (Iterator it = call.arguments().iterator(); it.hasNext(); ) {
        polyglot.ast.Expr arg = (polyglot.ast.Expr) it.next();
        soot.Value argValue = createExpr(arg);
        if (argValue instanceof soot.jimple.ConditionExpr) {
            argValue = handleCondBinExpr((soot.jimple.ConditionExpr) argValue);
        }
        sootParams.add(argValue);
    }
    return sootParams;
}
/**
 * Returns list of param types: the Soot type for each formal type of the
 * call's procedure instance, in declaration order.
 */
private ArrayList getSootParamsTypes(polyglot.ast.ProcedureCall call) {
    ArrayList sootParamsTypes = new ArrayList();
    for (Iterator it = call.procedureInstance().formalTypes().iterator(); it.hasNext(); ) {
        polyglot.types.Type formal = (polyglot.types.Type) it.next();
        sootParamsTypes.add(Util.getSootType(formal));
    }
    return sootParamsTypes;
}
/**
 * Gets the Soot Method from the given Soot Class.
 *
 * If no exact match exists, falls back to the inner-class constructor
 * case: the first (outer-this) parameter may have been recorded as a
 * subclass of the declared parameter type, so walk up the superclass
 * chain of the first parameter looking for a match. Note that the
 * fallback mutates {@code paramTypes} in place (as the original did).
 *
 * Fixes over the previous version: the lookup no longer uses an
 * exception for control flow, the RefType cast is guarded, and the
 * search loop terminates at the top of the class hierarchy instead of
 * spinning on a never-set flag.
 *
 * @return the matching method, or null if none is found.
 */
private soot.SootMethod getMethodFromClass(soot.SootClass sootClass, String name, ArrayList paramTypes, soot.Type returnType) {
    if (sootClass.declaresMethod(name, paramTypes, returnType)) {
        return sootClass.getMethod(name, paramTypes, returnType);
    }
    // its possible we're looking for a init method for an inner
    // class and we have a subclass as the outerclass this ref param
    // type but the actually method uses the super class
    if (paramTypes != null && !paramTypes.isEmpty()
            && (paramTypes.get(0) instanceof soot.RefType)) {
        soot.SootClass firstParam = ((soot.RefType) paramTypes.get(0)).getSootClass();
        while (firstParam.hasSuperclass()) {
            paramTypes.set(0, firstParam.getSuperclass().getType());
            if (sootClass.declaresMethod(name, paramTypes, returnType)) {
                return sootClass.getMethod(name, paramTypes, returnType);
            }
            firstParam = firstParam.getSuperclass();
        }
    }
    return null;
}
/**
 * Adds extra params: for an anonymous/local class identified by keyType,
 * appends the captured final locals (and their types) to the given
 * parameter lists. Both lists are mutated in place.
 */
private void handleFinalLocalParams(ArrayList sootParams, ArrayList sootParamTypes, polyglot.types.ClassType keyType){
    HashMap finalLocalInfo = soot.javaToJimple.InitialResolver.v().finalLocalInfo();
    if (finalLocalInfo == null) {
        return;
    }
    polyglot.util.IdentityKey key = new polyglot.util.IdentityKey(keyType);
    if (!finalLocalInfo.containsKey(key)) {
        return;
    }
    AnonLocalClassInfo alci = (AnonLocalClassInfo) finalLocalInfo.get(key);
    ArrayList finalLocals = alci.finalLocals();
    if (finalLocals == null) {
        return;
    }
    for (Iterator it = finalLocals.iterator(); it.hasNext(); ) {
        polyglot.types.LocalInstance li =
            (polyglot.types.LocalInstance) ((polyglot.util.IdentityKey) it.next()).object();
        sootParamTypes.add(Util.getSootType(li.type()));
        sootParams.add(getLocal(li));
    }
}
/**
 * Delegates to Util.getThis to obtain the this/outer-this local of the
 * given type for the current body, using the shared getThisMap cache.
 */
private soot.Local getThis(soot.Type sootType){
    return Util.getThis(sootType, body, getThisMap, lg);
}
/**
 * adds outer class params: if the invoked class's <init> is recorded as
 * needing an outer-class reference, appends the outer-this local and its
 * type to the given parameter lists (mutated in place).
 */
private void handleOuterClassParams(ArrayList sootParams, ArrayList sootParamsTypes, polyglot.types.ClassType typeToInvoke){
    ArrayList needsRef = soot.javaToJimple.InitialResolver.v().getHasOuterRefInInit();
    if (needsRef == null || !needsRef.contains(Util.getSootType(typeToInvoke))) {
        return;
    }
    soot.SootClass outerClass = ((soot.RefType) Util.getSootType(typeToInvoke.outer())).getSootClass();
    soot.Local outerThis = getThis(outerClass.getType());
    sootParamsTypes.add(outerClass.getType());
    sootParams.add(outerThis);
}
/**
 * Constructor Call Creation.
 *
 * Translates an explicit this(...)/super(...) call: resolves the target
 * class, collects outer-class and captured-final-local parameters where
 * applicable, emits the specialinvoke, and — for a super() inside <init> —
 * runs the deferred field/initializer handling.
 *
 * (Removed: a dead local that computed the container's full name and
 * never used it.)
 */
private void createConstructorCall(polyglot.ast.ConstructorCall cCall) {
    ArrayList sootParams = new ArrayList();
    ArrayList sootParamsTypes = new ArrayList();
    polyglot.types.ConstructorInstance cInst = cCall.constructorInstance();
    soot.SootClass classToInvoke;
    if (cCall.kind() == polyglot.ast.ConstructorCall.SUPER) {
        classToInvoke = ((soot.RefType)Util.getSootType(cInst.container())).getSootClass();
    }
    else if (cCall.kind() == polyglot.ast.ConstructorCall.THIS) {
        classToInvoke = body.getMethod().getDeclaringClass();
    }
    else {
        throw new RuntimeException("Unknown kind of Constructor Call");
    }
    soot.Local base = specialThisLocal;
    if (cCall.qualifier() != null){
        polyglot.types.ClassType objType = (polyglot.types.ClassType)cInst.container();
        if ((objType.outer() != null) && (body.getMethod().getDeclaringClass().equals(((soot.RefType)Util.getSootType(objType.outer())).getSootClass()))){
            handleOuterClassParams(sootParams, sootParamsTypes, objType);
        }
        else {
            // Qualified call: pass the qualifier as the outer-this param and
            // emit a getClass() call on it (implicit null check, as javac does).
            soot.Local qVal = (soot.Local)createExpr(cCall.qualifier());
            sootParams.add(qVal);
            sootParamsTypes.add(qVal.getType());
            body.getUnits().add(soot.jimple.Jimple.v().newInvokeStmt(soot.jimple.Jimple.v().newVirtualInvokeExpr(qVal, soot.Scene.v().getSootClass("java.lang.Object").getMethodByName("getClass"), new ArrayList())));
        }
    }
    else {
        handleOuterClassParams(sootParams, sootParamsTypes, (polyglot.types.ClassType)cInst.container());
    }
    // '$' in the class name marks an inner class: may need captured locals.
    int index = classToInvoke.getName().lastIndexOf("$");
    sootParams.addAll(getSootParams(cCall));
    sootParamsTypes.addAll(getSootParamsTypes(cCall));
    if (index != -1){
        // fix this to send the final locals list instead of empty array
        handleFinalLocalParams(sootParams, sootParamsTypes, (polyglot.types.ClassType)cCall.constructorInstance().container());
    }
    soot.SootMethod methodToInvoke = getMethodFromClass(classToInvoke, "<init>", sootParamsTypes, soot.VoidType.v());
    soot.jimple.SpecialInvokeExpr specialInvokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(base, methodToInvoke, sootParams);
    soot.jimple.Stmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(specialInvokeExpr);
    body.getUnits().add(invokeStmt);
    Util.addLnPosTags(invokeStmt, cCall.position());
    // Tag each argument box with the source position of its expression.
    int numParams = 0;
    Iterator invokeParamsIt = cCall.arguments().iterator();
    while (invokeParamsIt.hasNext()) {
        Util.addLnPosTags(specialInvokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)invokeParamsIt.next()).position());
        numParams++;
    }
    // if method is <init> handle field inits
    if (body.getMethod().getName().equals("<init>") && (cCall.kind() == polyglot.ast.ConstructorCall.SUPER)){
        handleOuterClassThisInit(body.getMethod());
        handleFinalLocalInits();
        handleFieldInits(body.getMethod());
        handleInitializerBlocks(body.getMethod());
    }
}
/**
 * Assigns each captured-final field from the corresponding trailing
 * constructor parameter: the finals occupy the last finalsList.size()
 * parameters, in order.
 */
private void handleFinalLocalInits(){
    ArrayList finalsList = ((PolyglotMethodSource) body.getMethod().getSource()).getFinalsList();
    if (finalsList == null) {
        return;
    }
    // First final maps to parameter index (total params - number of finals).
    int paramIndex = paramRefCount - finalsList.size();
    for (Iterator it = finalsList.iterator(); it.hasNext(); paramIndex++) {
        soot.SootField finalField = (soot.SootField) it.next();
        soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(specialThisLocal, finalField);
        soot.jimple.AssignStmt init = soot.jimple.Jimple.v().newAssignStmt(fieldRef, body.getParameterLocal(paramIndex));
        body.getUnits().add(init);
    }
}
/**
* Local Class Decl - Local Inner Class
*/
private void createLocalClassDecl(polyglot.ast.LocalClassDecl cDecl) {
BiMap lcMap = InitialResolver.v().getLocalClassMap();
String name = Util.getSootType(cDecl.decl().type()).toString();
if (!InitialResolver.v().hasClassInnerTag(body.getMethod().getDeclaringClass(), name)){
body.getMethod().getDeclaringClass().addTag(
new soot.tagkit.InnerClassTag(
name,
null, //"<not a member>",
cDecl.decl().name(),
Util.getModifier(cDecl.decl().flags()) ));
}
}
/**
* New Expression Creation
*/
    /**
     * New Expression Creation
     *
     * Translates "new T(args)" — including anonymous-class instantiations —
     * into a Jimple new expression followed by the matching {@code <init>}
     * special invoke. Along the way it records InnerClassTags for inner or
     * anonymous types being instantiated, threads through any outer-class
     * "this" and captured-final parameters the synthetic constructor
     * expects, and returns the local holding the new instance.
     */
    private soot.Local getNewLocal(polyglot.ast.New newExpr) {
        // handle parameters/args
        ArrayList sootParams = new ArrayList();
        ArrayList sootParamsTypes = new ArrayList();
        polyglot.types.ClassType objType = (polyglot.types.ClassType)newExpr.objectType().type();
        if (newExpr.anonType() != null){
            objType = newExpr.anonType();
            // add inner class tags for any anon classes created
            String name = Util.getSootType(objType).toString();
            polyglot.types.ClassType outerType = objType.outer();
            if (!InitialResolver.v().hasClassInnerTag(body.getMethod().getDeclaringClass(), name)){
                body.getMethod().getDeclaringClass().addTag(
                    new soot.tagkit.InnerClassTag(
                        name,
                        null,//"<not a member>",
                        null,//"<anonymous>",
                        outerType.flags().isInterface() ? soot.Modifier.PUBLIC | soot.Modifier.STATIC : Util.getModifier(objType.flags()) ));
            }
        }
        else {
            // not an anon class but actually invoking a new something
            if (!objType.isTopLevel()){
                String name = Util.getSootType(objType).toString();
                polyglot.types.ClassType outerType = objType.outer();
                if (!InitialResolver.v().hasClassInnerTag(body.getMethod().getDeclaringClass(), name)){
                    body.getMethod().getDeclaringClass().addTag(
                        new soot.tagkit.InnerClassTag(
                            name,
                            Util.getSootType(outerType).toString(),
                            objType.name(),
                            outerType.flags().isInterface() ? soot.Modifier.PUBLIC | soot.Modifier.STATIC : Util.getModifier(objType.flags()) ));
                }
            }
        }
        // emit: retLocal = new T
        soot.RefType sootType = (soot.RefType)Util.getSootType(objType);
        soot.Local retLocal = lg.generateLocal(sootType);
        soot.jimple.NewExpr sootNew = soot.jimple.Jimple.v().newNewExpr(sootType);
        soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootNew);
        body.getUnits().add(stmt);
        Util.addLnPosTags(stmt, newExpr.position());
        soot.SootClass classToInvoke = sootType.getSootClass();
        // a qualified new ("outer.new Inner()") supplies the enclosing
        // instance explicitly; otherwise it comes from the current context
        if (newExpr.qualifier() != null){
            if ((objType.outer() != null) && (body.getMethod().getDeclaringClass().equals(((soot.RefType)Util.getSootType(objType.outer())).getSootClass())) && (!soot.Modifier.isStatic(body.getMethod().getModifiers()))){
                handleOuterClassParams(sootParams, sootParamsTypes, objType);
            }
            else {
                soot.Value qVal = createExpr(newExpr.qualifier());
                sootParams.add(qVal);
                sootParamsTypes.add(qVal.getType());
                // call getClass() on the qualifier so a null qualifier
                // raises a NullPointerException, as the JLS requires
                body.getUnits().add(soot.jimple.Jimple.v().newInvokeStmt(soot.jimple.Jimple.v().newVirtualInvokeExpr((soot.Local)qVal, soot.Scene.v().getSootClass("java.lang.Object").getMethodByName("getClass"), new ArrayList())));
            }
        }
        else {
            handleOuterClassParams(sootParams, sootParamsTypes, objType);
        }
        // explicit constructor arguments, then any captured-final params
        sootParams.addAll(getSootParams(newExpr));
        sootParamsTypes.addAll(getSootParamsTypes(newExpr));
        handleFinalLocalParams(sootParams, sootParamsTypes, (polyglot.types.ClassType)objType);
        soot.SootMethod methodToInvoke = getMethodFromClass(classToInvoke, "<init>", sootParamsTypes, soot.VoidType.v());
        if (!methodToInvoke.getDeclaringClass().getType().equals(classToInvoke.getType())){
            throw new RuntimeException("created new for type: "+classToInvoke.getType()+" but didn't find needed initializer there instead found initializer in "+methodToInvoke.getDeclaringClass().getType());
        }
        // emit: specialinvoke retLocal.<init>(params)
        soot.jimple.SpecialInvokeExpr specialInvokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(retLocal, methodToInvoke, sootParams);
        soot.jimple.Stmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(specialInvokeExpr);
        body.getUnits().add(invokeStmt);
        Util.addLnPosTags(invokeStmt, newExpr.position());
        // tag each argument box with its source position
        int numParams = 0;
        Iterator invokeParamsIt = newExpr.arguments().iterator();
        while (invokeParamsIt.hasNext()) {
            Util.addLnPosTags(specialInvokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)invokeParamsIt.next()).position());
            numParams++;
        }
        return retLocal;
    }
/**
* Call Expression Creation
*/
    /**
     * Call Expression Creation
     *
     * Translates a Java method call into the appropriate Jimple invoke
     * (static / special / interface / virtual), substituting a synthetic
     * access method when the call targets a private member of an outer
     * class, and delegating qualified-super calls to
     * {@link #getSpecialSuperQualifierLocal}. Returns the local holding
     * the result, or null for a void call.
     */
    private soot.Local getCallLocal(polyglot.ast.Call call){
        // handle name
        String name = call.name();
        // handle receiver/target
        polyglot.ast.Receiver receiver = call.target();
        soot.Local baseLocal;
        // "Outer.super.m(...)" needs an access method in the qualifier's
        // class; it is handled separately and may return null for void
        if ((receiver instanceof polyglot.ast.Special) && (((polyglot.ast.Special)receiver).kind() == polyglot.ast.Special.SUPER) && (((polyglot.ast.Special)receiver).qualifier() != null)){
            baseLocal = getSpecialSuperQualifierLocal(call);
            return baseLocal;
        }
        baseLocal = (soot.Local)getBaseLocal(receiver);
        soot.Type sootRecType = Util.getSootType(receiver.type());
        // fall back to java.lang.Object when the receiver isn't a RefType
        soot.SootClass receiverTypeClass = soot.Scene.v().getSootClass("java.lang.Object");
        if (sootRecType instanceof soot.RefType){
            receiverTypeClass = ((soot.RefType)sootRecType).getSootClass();
        }
        polyglot.types.MethodInstance methodInstance = call.methodInstance();
        soot.Type sootRetType = Util.getSootType(methodInstance.returnType());
        ArrayList sootParamsTypes = getSootParamsTypes(call);
        ArrayList sootParams = getSootParams(call);
        soot.SootMethod callMethod = getMethodFromClass(receiverTypeClass, methodInstance.name(), sootParamsTypes, sootRetType);
        boolean isPrivateAccess = false;
        // private members of an outer class are reached through a static
        // access$N method recorded in the method source's map
        soot.javaToJimple.PolyglotMethodSource ms = (soot.javaToJimple.PolyglotMethodSource)body.getMethod().getSource();
        if ((ms.getPrivateAccessMap() != null) && (ms.getPrivateAccessMap().containsKey(call.methodInstance()))){
            callMethod = (soot.SootMethod)ms.getPrivateAccessMap().get(call.methodInstance());
            if (!call.methodInstance().flags().isStatic()){
                // the receiver becomes an explicit argument of the accessor
                sootParams.add(baseLocal);
            }
            isPrivateAccess = true;
        }
        soot.jimple.InvokeExpr invokeExpr;
        if (isPrivateAccess){
            // for accessing private methods in outer class -> always static
            invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(callMethod, sootParams);
        }
        else if (soot.Modifier.isInterface(receiverTypeClass.getModifiers()) && methodInstance.flags().isAbstract()) {
            // if reciever class is interface and method is abstract -> interface
            invokeExpr = soot.jimple.Jimple.v().newInterfaceInvokeExpr(baseLocal, callMethod, sootParams);
        }
        else if (methodInstance.flags().isStatic()){
            // if flag isStatic -> static invoke
            invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(callMethod, sootParams);
        }
        else if (methodInstance.flags().isPrivate()){
            // if flag isPrivate -> special invoke
            invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(baseLocal, callMethod, sootParams);
        }
        else if ((receiver instanceof polyglot.ast.Special) &&
            (((polyglot.ast.Special)receiver).kind() == polyglot.ast.Special.SUPER)){
            // receiver is special super -> special
            invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(baseLocal, callMethod, sootParams);
        }
        else {
            // else virtual invoke
            invokeExpr = soot.jimple.Jimple.v().newVirtualInvokeExpr(baseLocal, callMethod, sootParams);
        }
        // tag each argument box with its source position
        int numParams = 0;
        Iterator callParamsIt = call.arguments().iterator();
        while (callParamsIt.hasNext()) {
            Util.addLnPosTags(invokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)callParamsIt.next()).position());
            numParams++;
        }
        if (invokeExpr instanceof soot.jimple.InstanceInvokeExpr) {
            Util.addLnPosTags(((soot.jimple.InstanceInvokeExpr)invokeExpr).getBaseBox(), call.target().position());
        }
        // create an assign stmt so invoke can be used somewhere else
        if (invokeExpr.getMethod().getReturnType().equals(soot.VoidType.v())) {
            soot.jimple.Stmt invoke = soot.jimple.Jimple.v().newInvokeStmt(invokeExpr);
            body.getUnits().add(invoke);
            Util.addLnPosTags(invoke, call.position());
            return null;
        }
        else {
            soot.Local retLocal = lg.generateLocal(invokeExpr.getMethod().getReturnType());
            soot.jimple.Stmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, invokeExpr);
            // add assign stmt to body
            body.getUnits().add(assignStmt);
            Util.addLnPosTags(assignStmt, call.position());
            return retLocal;
        }
    }
private soot.Value getBaseLocal(polyglot.ast.Receiver receiver) {
if (receiver instanceof polyglot.ast.TypeNode) {
return generateLocal(((polyglot.ast.TypeNode)receiver).type());
}
else {
soot.Value val = createExpr((polyglot.ast.Expr)receiver);
if (val instanceof soot.jimple.Constant) {
soot.Local retLocal = lg.generateLocal(val.getType());
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, val);
body.getUnits().add(stmt);
return retLocal;
}
return val;
}
}
/**
* NewArray Expression Creation
*/
    /**
     * NewArray Expression Creation
     *
     * Translates an array creation into a Jimple newarray (one explicit
     * dimension) or newmultiarray (several dimensions), then applies the
     * array initializer, if any, to the same local. Returns the local
     * holding the new array.
     */
    private soot.Local getNewArrayLocal(polyglot.ast.NewArray newArrExpr) {
        soot.Type sootType = Util.getSootType(newArrExpr.type());
        soot.jimple.Expr expr;
        if (newArrExpr.numDims() == 1) {
            soot.Value dimLocal;
            if (newArrExpr.additionalDims() == 1) {
                // e.g. "new T[n][]" — the created dimension count is 1
                dimLocal = soot.jimple.IntConstant.v(1);
            }
            else {
                dimLocal = createExpr((polyglot.ast.Expr)newArrExpr.dims().get(0));
            }
            soot.jimple.NewArrayExpr newArrayExpr = soot.jimple.Jimple.v().newNewArrayExpr(((soot.ArrayType)sootType).getElementType(), dimLocal);
            expr = newArrayExpr;
            if (newArrExpr.additionalDims() != 1){
                Util.addLnPosTags(newArrayExpr.getSizeBox(), ((polyglot.ast.Expr)newArrExpr.dims().get(0)).position());
            }
        }
        else {
            // multi-dimensional: evaluate every explicit dimension expr
            ArrayList valuesList = new ArrayList();
            Iterator it = newArrExpr.dims().iterator();
            while (it.hasNext()){
                valuesList.add(createExpr((polyglot.ast.Expr)it.next()));
            }
            if (newArrExpr.additionalDims() != 0) {
                valuesList.add(soot.jimple.IntConstant.v(newArrExpr.additionalDims()));
            }
            soot.jimple.NewMultiArrayExpr newMultiArrayExpr = soot.jimple.Jimple.v().newNewMultiArrayExpr((soot.ArrayType)sootType, valuesList);
            expr = newMultiArrayExpr;
            Iterator sizeBoxIt = newArrExpr.dims().iterator();
            int counter = 0;
            while (sizeBoxIt.hasNext()){
                Util.addLnPosTags(newMultiArrayExpr.getSizeBox(counter), ((polyglot.ast.Expr)sizeBoxIt.next()).position());
                counter++;
            }
        }
        soot.Local retLocal = lg.generateLocal(sootType);
        soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, expr);
        body.getUnits().add(stmt);
        Util.addLnPosTags(stmt, newArrExpr.position());
        Util.addLnPosTags(stmt.getRightOpBox(), newArrExpr.position());
        // handle array init if one exists
        if (newArrExpr.init() != null) {
            soot.Value initVal = getArrayInitLocal(newArrExpr.init(), newArrExpr.type());
            soot.jimple.AssignStmt initStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, initVal);
            body.getUnits().add(initStmt);
        }
        return retLocal;
    }
/**
* create ArrayInit given init and the array local
*/
private soot.Local getArrayInitLocal(polyglot.ast.ArrayInit arrInit, polyglot.types.Type lhsType) {
soot.Local local = generateLocal(lhsType);
soot.jimple.NewArrayExpr arrExpr = soot.jimple.Jimple.v().newNewArrayExpr(((soot.ArrayType)local.getType()).getElementType(), soot.jimple.IntConstant.v(arrInit.elements().size()));
soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(local, arrExpr);
body.getUnits().add(assign);
Util.addLnPosTags(assign, arrInit.position());
Iterator it = arrInit.elements().iterator();
int index = 0;
while (it.hasNext()){
polyglot.ast.Expr elemExpr = (polyglot.ast.Expr)it.next();
soot.Value elem;
if (elemExpr instanceof polyglot.ast.ArrayInit){
if (((polyglot.ast.ArrayInit)elemExpr).type() instanceof polyglot.types.NullType) {
if (lhsType instanceof polyglot.types.ArrayType){
elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, ((polyglot.types.ArrayType)lhsType).base());
}
else {
elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, lhsType);
}
}
else {
elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, ((polyglot.ast.ArrayInit)elemExpr).type());
}
}
else {
elem = createExpr(elemExpr);
}
soot.jimple.ArrayRef arrRef = soot.jimple.Jimple.v().newArrayRef(local, soot.jimple.IntConstant.v(index));
soot.jimple.AssignStmt elemAssign = soot.jimple.Jimple.v().newAssignStmt(arrRef, elem);
body.getUnits().add(elemAssign);
Util.addLnPosTags(elemAssign, elemExpr.position());
Util.addLnPosTags(elemAssign.getRightOpBox(), elemExpr.position());
index++;
}
return local;
}
/**
* create LHS expressions
*/
private soot.Value createLHS(polyglot.ast.Expr expr) {
if (expr instanceof polyglot.ast.Local) {
return getLocal((polyglot.ast.Local)expr);
}
else if (expr instanceof polyglot.ast.ArrayAccess) {
return getArrayRefLocalLeft((polyglot.ast.ArrayAccess)expr);
}
else if (expr instanceof polyglot.ast.Field) {
return getFieldLocalLeft((polyglot.ast.Field)expr);
}
else {
throw new RuntimeException("Unhandled LHS");
}
}
/**
* Array Ref Expression Creation - LHS
*/
private soot.Value getArrayRefLocalLeft(polyglot.ast.ArrayAccess arrayRefExpr) {
polyglot.ast.Expr array = arrayRefExpr.array();
polyglot.ast.Expr access = arrayRefExpr.index();
soot.Local arrLocal = (soot.Local)createExpr(array);
soot.Value arrAccess = createExpr(access);
soot.Local retLocal = generateLocal(arrayRefExpr.type());
soot.jimple.ArrayRef ref = soot.jimple.Jimple.v().newArrayRef(arrLocal, arrAccess);
Util.addLnPosTags(ref.getBaseBox(), arrayRefExpr.array().position());
Util.addLnPosTags(ref.getIndexBox(), arrayRefExpr.index().position());
return ref;
}
/**
* Array Ref Expression Creation
*/
private soot.Value getArrayRefLocal(polyglot.ast.ArrayAccess arrayRefExpr) {
polyglot.ast.Expr array = arrayRefExpr.array();
polyglot.ast.Expr access = arrayRefExpr.index();
soot.Local arrLocal = (soot.Local)createExpr(array);
soot.Value arrAccess = createExpr(access);
soot.Local retLocal = generateLocal(arrayRefExpr.type());
soot.jimple.ArrayRef ref = soot.jimple.Jimple.v().newArrayRef(arrLocal, arrAccess);
Util.addLnPosTags(ref.getBaseBox(), arrayRefExpr.array().position());
Util.addLnPosTags(ref.getIndexBox(), arrayRefExpr.index().position());
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, ref);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, arrayRefExpr.position());
return retLocal;
}
    /**
     * Handles a qualified-super access ("Outer.super.m(...)" or
     * "Outer.super.f"): creates a static access$N method in the
     * qualifier's class, then invokes it, passing the appropriate outer
     * "this" as the first argument. Returns the local holding the result,
     * or null when the accessed method is void.
     */
    private soot.Local getSpecialSuperQualifierLocal(polyglot.ast.Expr expr){
        soot.SootClass classToInvoke;
        ArrayList methodParams = new ArrayList();
        if (expr instanceof polyglot.ast.Call){
            polyglot.ast.Special target = (polyglot.ast.Special)((polyglot.ast.Call)expr).target();
            classToInvoke = ((soot.RefType)Util.getSootType(target.qualifier().type())).getSootClass();
            methodParams = getSootParams((polyglot.ast.Call)expr);
        }
        else if (expr instanceof polyglot.ast.Field){
            polyglot.ast.Special target = (polyglot.ast.Special)((polyglot.ast.Field)expr).target();
            classToInvoke = ((soot.RefType)Util.getSootType(target.qualifier().type())).getSootClass();
        }
        else {
            throw new RuntimeException("Trying to create special super qualifier for: "+expr+" which is not a field or call");
        }
        // make an access method
        soot.SootMethod methToInvoke = makeSuperAccessMethod(classToInvoke, expr);
        // invoke it
        soot.Local classToInvokeLocal = Util.getThis(classToInvoke.getType(), body, getThisMap, lg);
        // the accessor is static: the receiver goes in as the first argument
        methodParams.add(0, classToInvokeLocal);
        soot.jimple.InvokeExpr invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(methToInvoke, methodParams);
        // return the local of return type if not void
        if (!methToInvoke.getReturnType().equals(soot.VoidType.v())){
            soot.Local retLocal = lg.generateLocal(methToInvoke.getReturnType());
            soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, invokeExpr);
            body.getUnits().add(stmt);
            return retLocal;
        }
        else {
            body.getUnits().add(soot.jimple.Jimple.v().newInvokeStmt(invokeExpr));
            return null;
        }
    }
/**
* Special Expression Creation
*/
private soot.Local getSpecialLocal(polyglot.ast.Special specialExpr) {
if (specialExpr.kind() == polyglot.ast.Special.SUPER) {
if (specialExpr.qualifier() == null){
return specialThisLocal;
}
else {
// this isn't enough
// need to getThis for the type which may be several levels up
// add access$N method to class of the type which returns
// field or method wanted
// invoke it
// and it needs to be called specially when getting fields
// or calls because need to know field or method to access
// as it access' a field or meth in the super class of the
// outer class refered to by the qualifier
return getThis(Util.getSootType(specialExpr.qualifier().type()));
}
}
else if (specialExpr.kind() == polyglot.ast.Special.THIS) {
if (specialExpr.qualifier() == null) {
return specialThisLocal;
}
else {
return getThis(Util.getSootType(specialExpr.qualifier().type()));
}
}
else {
throw new RuntimeException("Unknown Special");
}
}
    /**
     * Creates a synthetic static "access$N00" method in the given class
     * that forwards to the super member (field read or method call) being
     * accessed via a qualified super. The accessor's first parameter is
     * the receiver instance; for a call, the original arguments follow.
     *
     * @param classToInvoke  class that will own the new accessor
     * @param memberToAccess either a polyglot.ast.Field or polyglot.ast.Call
     * @return the newly created accessor, with its body already attached
     */
    private soot.SootMethod makeSuperAccessMethod(soot.SootClass classToInvoke, Object memberToAccess){
        // unique name per accessor, drawn from a global counter
        String name = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
        ArrayList paramTypes = new ArrayList();
        paramTypes.add(classToInvoke.getType());
        soot.SootMethod meth;
        soot.MethodSource src;
        if (memberToAccess instanceof polyglot.ast.Field){
            polyglot.ast.Field fieldToAccess = (polyglot.ast.Field)memberToAccess;
            meth = new soot.SootMethod(name, paramTypes, Util.getSootType(fieldToAccess.type()), soot.Modifier.STATIC);
            PrivateFieldAccMethodSource fSrc = new PrivateFieldAccMethodSource();
            fSrc.fieldName(fieldToAccess.name());
            fSrc.fieldType(Util.getSootType(fieldToAccess.type()));
            fSrc.classToInvoke(((soot.RefType)Util.getSootType(fieldToAccess.target().type())).getSootClass());
            src = fSrc;
        }
        else if (memberToAccess instanceof polyglot.ast.Call){
            polyglot.ast.Call methToAccess = (polyglot.ast.Call)memberToAccess;
            paramTypes.addAll(getSootParamsTypes(methToAccess));
            meth = new soot.SootMethod(name, paramTypes, Util.getSootType(methToAccess.methodInstance().returnType()), soot.Modifier.STATIC);
            PrivateMethodAccMethodSource mSrc = new PrivateMethodAccMethodSource();
            mSrc.setMethodInst(methToAccess.methodInstance());
            src = mSrc;
        }
        else {
            throw new RuntimeException("trying to access unhandled member type: "+memberToAccess);
        }
        classToInvoke.addMethod(meth);
        meth.setActiveBody(src.getBody(meth, null));
        return meth;
    }
/**
* InstanceOf Expression Creation
*/
private soot.Local getInstanceOfLocal(polyglot.ast.Instanceof instExpr) {
soot.Type sootType = Util.getSootType(instExpr.compareType().type());
soot.Value local = createExpr(instExpr.expr());
soot.jimple.InstanceOfExpr instOfExpr = soot.jimple.Jimple.v().newInstanceOfExpr(local, sootType);
soot.Local lhs = lg.generateLocal(soot.BooleanType.v());
soot.jimple.Stmt instAssign = soot.jimple.Jimple.v().newAssignStmt(lhs, instOfExpr);
body.getUnits().add(instAssign);
Util.addLnPosTags(instAssign, instExpr.position());
Util.addLnPosTags(instOfExpr.getOpBox(), instExpr.expr().position());
return lhs;
}
/**
* Condition Expression Creation - can maybe merge with If
*/
private soot.Local getConditionalLocal(polyglot.ast.Conditional condExpr){
// handle cond
polyglot.ast.Expr condition = condExpr.cond();
soot.Value sootCond = createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
sootCond = reverseCondition((soot.jimple.ConditionExpr)sootCond);
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
if (needIf){
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
body.getUnits().add(ifStmt);
Util.addLnPosTags(ifStmt, condExpr.position());
Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
}
soot.Local retLocal = generateLocal(condExpr.alternative().type());
// handle consequence
polyglot.ast.Expr consequence = condExpr.consequent();
soot.Value conseqVal = createExpr(consequence);
soot.jimple.AssignStmt conseqAssignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, conseqVal);
body.getUnits().add(conseqAssignStmt);
Util.addLnPosTags(conseqAssignStmt, condExpr.position());
Util.addLnPosTags(conseqAssignStmt.getRightOpBox(), consequence.position());
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(goto1);
// handle alternative
body.getUnits().add(noop1);
polyglot.ast.Expr alternative = condExpr.alternative();
if (alternative != null){
soot.Value altVal = createExpr(alternative);
soot.jimple.AssignStmt altAssignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, altVal);
body.getUnits().add(altAssignStmt);
Util.addLnPosTags(altAssignStmt, condExpr.position());
Util.addLnPosTags(altAssignStmt, alternative.position());
Util.addLnPosTags(altAssignStmt.getRightOpBox(), alternative.position());
}
body.getUnits().add(noop2);
return retLocal;
}
/**
* Utility methods
*/
private boolean isLitOrLocal(polyglot.ast.Expr exp) {
if (exp instanceof polyglot.ast.Lit) return true;
if (exp instanceof polyglot.ast.Local) return true;
else return false;
}
/**
* Extra Local Variables Generation
*/
private soot.Local generateLocal(polyglot.types.Type polyglotType) {
soot.Type type = Util.getSootType(polyglotType);
return lg.generateLocal(type);
}
}
|
package soot.javaToJimple;
import java.util.*;
import soot.SootFieldRef;
public class JimpleBodyBuilder extends AbstractJimpleBodyBuilder {
    public JimpleBodyBuilder(){
        //ext(null);
        //base(this);
    }
    ArrayList exceptionTable;            // traps collected while building, flushed into body at the end
    Stack endControlNoop = new Stack();  // nop targets for break (one per enclosing loop/switch)
    Stack condControlNoop = new Stack(); // nop targets for continue
    Stack monitorStack;                  // monitors held, for synchronized blocks
    Stack tryStack;                      // enclosing try stmts, needed when a return unwinds them
    Stack catchStack;                    // enclosing catch stmts, needed when a return unwinds them
    HashMap labelBreakMap;               // break label --> nop to jump to
    HashMap labelContinueMap;            // continue label --> nop to jump to
    String lastLabel;                    // most recently seen statement label
    HashMap localsMap = new HashMap();   // polyglot LocalInstance --> soot local
    HashMap getThisMap = new HashMap();  // enclosing type --> local holding its "this"
    soot.Local specialThisLocal;         // this local of the method being built
    soot.Local outerClassParamLocal;     // outer-class "this" passed to inner-class <init>
    private int paramRefCount = 0;       // number of ParameterRef identity stmts emitted so far
    LocalGenerator lg;                   // generator for locals not present in the original source
/**
* Jimple Body Creation
*/
    /**
     * Jimple Body Creation
     *
     * Builds the full Jimple body for one method: identity statements
     * (this ref, outer-class this param for inner-class constructors,
     * declared formals, captured-final params), then the translated
     * statement block, then — for clinit — assert/static-field/static-block
     * initialization, a synthesized return for void methods without an
     * explicit one, and finally the collected exception traps.
     *
     * @param block      the method's statement block (may be null)
     * @param formals    the declared formal parameters (may be null)
     * @param sootMethod the method whose body is being created
     * @return the completed (and already active) Jimple body
     */
    public soot.jimple.JimpleBody createJimpleBody(polyglot.ast.Block block, List formals, soot.SootMethod sootMethod){
        createBody(sootMethod);
        lg = new LocalGenerator(body);
        // create this formal except for static methods
        if (!soot.Modifier.isStatic(sootMethod.getModifiers())) {
            soot.RefType type = sootMethod.getDeclaringClass().getType();
            specialThisLocal = soot.jimple.Jimple.v().newLocal("this", type);
            body.getLocals().add(specialThisLocal);
            soot.jimple.ThisRef thisRef = soot.jimple.Jimple.v().newThisRef(type);
            soot.jimple.Stmt thisStmt = soot.jimple.Jimple.v().newIdentityStmt(specialThisLocal, thisRef);
            body.getUnits().add(thisStmt);
            // this is causing problems - no this in java code -> no tags
            //Util.addLineTag(thisStmt, block);
        }
        int formalsCounter = 0;
        //create outer class this param ref for inner classes except for static inner classes - this is not needed
        int outerIndex = sootMethod.getDeclaringClass().getName().lastIndexOf("$");
        int classMod = sootMethod.getDeclaringClass().getModifiers();
        if ((outerIndex != -1) && (sootMethod.getName().equals("<init>")) && sootMethod.getDeclaringClass().declaresFieldByName("this$0")){
            // we know its an inner non static class can get outer class
            // from field ref of the this$0 field
            soot.SootClass outerClass = ((soot.RefType)sootMethod.getDeclaringClass().getFieldByName("this$0").getType()).getSootClass();
            soot.Local outerLocal = lg.generateLocal(outerClass.getType());
            soot.jimple.ParameterRef paramRef = soot.jimple.Jimple.v().newParameterRef(outerClass.getType(), formalsCounter);
            paramRefCount++;
            soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(outerLocal, paramRef);
            stmt.addTag(new soot.tagkit.EnclosingTag());
            body.getUnits().add(stmt);
            ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).setOuterClassThisInit(outerLocal);
            outerClassParamLocal = outerLocal;
            formalsCounter++;
        }
        // handle formals
        if (formals != null) {
            ArrayList formalNames = new ArrayList();
            Iterator formalsIt = formals.iterator();
            while (formalsIt.hasNext()) {
                polyglot.ast.Formal formal = (polyglot.ast.Formal)formalsIt.next();
                createFormal(formal, formalsCounter);
                formalNames.add(formal.name());
                formalsCounter++;
            }
            body.getMethod().addTag(new soot.tagkit.ParamNamesTag(formalNames));
        }
        // handle final local params (captured finals come after the
        // declared formals in the synthetic parameter list)
        ArrayList finalsList = ((PolyglotMethodSource)body.getMethod().getSource()).getFinalsList();
        if (finalsList != null){
            Iterator finalsIt = finalsList.iterator();
            while (finalsIt.hasNext()){
                soot.SootField sf = (soot.SootField)finalsIt.next();
                soot.jimple.ParameterRef paramRef = soot.jimple.Jimple.v().newParameterRef(sf.getType(), formalsCounter);
                paramRefCount++;
                soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(lg.generateLocal(sf.getType()), paramRef);
                body.getUnits().add(stmt);
                formalsCounter++;
            }
        }
        createBlock(block);
        // if method is <clinit> handle static field inits
        if (sootMethod.getName().equals("<clinit>")){
            handleAssert(sootMethod);
            handleStaticFieldInits(sootMethod);
            handleStaticInitializerBlocks(sootMethod);
        }
        // determine if body has a return stmt
        boolean hasReturn = false;
        if (block != null) {
            Iterator it = block.statements().iterator();
            while (it.hasNext()){
                Object next = it.next();
                if (next instanceof polyglot.ast.Return){
                    hasReturn = true;
                }
            }
        }
        soot.Type retType = body.getMethod().getReturnType();
        // only do this if noexplicit return
        if ((!hasReturn) && (retType instanceof soot.VoidType)) {
            soot.jimple.Stmt retStmt = soot.jimple.Jimple.v().newReturnVoidStmt();
            body.getUnits().add(retStmt);
        }
        // add exceptions from exceptionTable
        if (exceptionTable != null) {
            Iterator trapsIt = exceptionTable.iterator();
            while (trapsIt.hasNext()){
                body.getTraps().add((soot.Trap)trapsIt.next());
            }
        }
        return body;
    }
private void handleAssert(soot.SootMethod sootMethod){
if (!((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).hasAssert()) return;
((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).addAssertInits(body);
}
/**
* adds any needed field inits
*/
private void handleFieldInits(soot.SootMethod sootMethod) {
ArrayList fieldInits = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getFieldInits();
if (fieldInits != null) {
handleFieldInits(fieldInits);
}
}
    /**
     * Emits "this.<field> = <init-expr>" for every field declaration with
     * an initializer, translating array initializers specially and
     * materializing condition expressions into locals first.
     *
     * NOTE(review): the field ref is always built as an instance field ref
     * on specialThisLocal, even though the SootFieldRef carries the
     * declaration's static flag — presumably callers only pass non-static
     * inits here; confirm against the method-source producer.
     */
    protected void handleFieldInits(ArrayList fieldInits){
        Iterator fieldInitsIt = fieldInits.iterator();
        while (fieldInitsIt.hasNext()) {
            polyglot.ast.FieldDecl field = (polyglot.ast.FieldDecl)fieldInitsIt.next();
            String fieldName = field.name();
            polyglot.ast.Expr initExpr = field.init();
            soot.SootClass currentClass = body.getMethod().getDeclaringClass();
            soot.SootFieldRef sootField = soot.Scene.v().makeFieldRef(currentClass, fieldName, Util.getSootType(field.type().type()), field.flags().isStatic());
            soot.Local base = specialThisLocal;
            soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(base, sootField);
            soot.Value sootExpr;
            if (initExpr instanceof polyglot.ast.ArrayInit) {
                sootExpr = getArrayInitLocal((polyglot.ast.ArrayInit)initExpr, field.type().type());
            }
            else {
                sootExpr = base().createExpr(initExpr);
            }
            if (sootExpr instanceof soot.jimple.ConditionExpr) {
                // condition exprs can't be assigned to a field directly
                sootExpr = handleCondBinExpr((soot.jimple.ConditionExpr)sootExpr);
            }
            soot.jimple.AssignStmt assign;
            if (sootExpr instanceof soot.Local){
                assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, (soot.Local)sootExpr);
            }
            else if (sootExpr instanceof soot.jimple.Constant){
                assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, (soot.jimple.Constant)sootExpr);
            }
            else {
                throw new RuntimeException("fields must assign to local or constant only");
            }
            body.getUnits().add(assign);
            Util.addLnPosTags(assign, initExpr.position());
            Util.addLnPosTags(assign.getRightOpBox(), initExpr.position());
        }
    }
/**
* adds this field for the outer class
*/
private void handleOuterClassThisInit(soot.SootMethod sootMethod) {
// static inner classes are different
if (body.getMethod().getDeclaringClass().declaresFieldByName("this$0")){
soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(specialThisLocal, body.getMethod().getDeclaringClass().getFieldByName("this$0").makeRef());
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(fieldRef, outerClassParamLocal);
body.getUnits().add(stmt);
}
}
/**
* adds any needed static field inits
*/
    /**
     * adds any needed static field inits
     *
     * Emits "ClassName.<field> = <init-expr>" for every static field
     * declaration with an initializer, translating array initializers
     * specially and materializing condition expressions into locals.
     */
    private void handleStaticFieldInits(soot.SootMethod sootMethod) {
        ArrayList staticFieldInits = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getStaticFieldInits();
        if (staticFieldInits != null) {
            Iterator staticFieldInitsIt = staticFieldInits.iterator();
            while (staticFieldInitsIt.hasNext()) {
                polyglot.ast.FieldDecl field = (polyglot.ast.FieldDecl)staticFieldInitsIt.next();
                String fieldName = field.name();
                polyglot.ast.Expr initExpr = field.init();
                soot.SootClass currentClass = body.getMethod().getDeclaringClass();
                soot.SootFieldRef sootField = soot.Scene.v().makeFieldRef(currentClass, fieldName, Util.getSootType(field.type().type()), field.flags().isStatic());
                soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(sootField);
                soot.Value sootExpr;
                if (initExpr instanceof polyglot.ast.ArrayInit) {
                    sootExpr = getArrayInitLocal((polyglot.ast.ArrayInit)initExpr, field.type().type());
                }
                else {
                    sootExpr = base().createExpr(initExpr);
                    if (sootExpr instanceof soot.jimple.ConditionExpr) {
                        // condition exprs can't be assigned to a field directly
                        sootExpr = handleCondBinExpr((soot.jimple.ConditionExpr)sootExpr);
                    }
                }
                soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, sootExpr);
                body.getUnits().add(assign);
                Util.addLnPosTags(assign, initExpr.position());
            }
        }
    }
/**
* init blocks get created within init methods in Jimple
*/
private void handleInitializerBlocks(soot.SootMethod sootMethod) {
ArrayList initializerBlocks = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getInitializerBlocks();
if (initializerBlocks != null) {
handleStaticBlocks(initializerBlocks);
}
}
protected void handleStaticBlocks(ArrayList initializerBlocks){
Iterator initBlocksIt = initializerBlocks.iterator();
while (initBlocksIt.hasNext()) {
createBlock((polyglot.ast.Block)initBlocksIt.next());
}
}
/**
* static init blocks get created in clinit methods in Jimple
*/
private void handleStaticInitializerBlocks(soot.SootMethod sootMethod) {
ArrayList staticInitializerBlocks = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getStaticInitializerBlocks();
if (staticInitializerBlocks != null) {
Iterator staticInitBlocksIt = staticInitializerBlocks.iterator();
while (staticInitBlocksIt.hasNext()) {
createBlock((polyglot.ast.Block)staticInitBlocksIt.next());
}
}
}
/**
* create body and make it be active
*/
private void createBody(soot.SootMethod sootMethod) {
body = soot.jimple.Jimple.v().newBody(sootMethod);
sootMethod.setActiveBody(body);
}
/**
* Block creation
*/
private void createBlock(polyglot.ast.Block block){
if (block == null) return;
// handle stmts
Iterator it = block.statements().iterator();
while (it.hasNext()){
Object next = it.next();
if (next instanceof polyglot.ast.Stmt){
createStmt((polyglot.ast.Stmt)next);
}
else {
throw new RuntimeException("Unexpected - Unhandled Node");
}
}
}
/**
 * Catch Formal creation - method parameters
 *
 * Binds the formal of a catch clause to the caught exception via an
 * identity stmt, tags it with source positions and the parameter
 * name, and returns the resulting local.
 */
private soot.Local createCatchFormal(polyglot.ast.Formal formal) {
    soot.Type sootType = Util.getSootType(formal.type().type());
    soot.Local catchLocal = createLocal(formal.localInstance());
    soot.jimple.CaughtExceptionRef caughtRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
    soot.jimple.Stmt idStmt = soot.jimple.Jimple.v().newIdentityStmt(catchLocal, caughtRef);
    body.getUnits().add(idStmt);
    Util.addLnPosTags(idStmt, formal.position());
    Util.addLnPosTags(((soot.jimple.IdentityStmt) idStmt).getRightOpBox(), formal.position());
    // Record the declared name of the formal for debugging/tag consumers.
    ArrayList paramNames = new ArrayList();
    paramNames.add(formal.name());
    idStmt.addTag(new soot.tagkit.ParamNamesTag(paramNames));
    return catchLocal;
}
/**
 * Formal creation - method parameters
 *
 * Binds a method parameter to its local via an identity stmt for
 * parameter slot 'counter', tagging both the ref box and the stmt
 * with the formal's source position.
 */
private void createFormal(polyglot.ast.Formal formal, int counter) {
    soot.Type paramType = Util.getSootType(formal.type().type());
    soot.Local paramLocal = createLocal(formal.localInstance());
    soot.jimple.ParameterRef ref = soot.jimple.Jimple.v().newParameterRef(paramType, counter);
    paramRefCount++;
    soot.jimple.IdentityStmt idStmt = soot.jimple.Jimple.v().newIdentityStmt(paramLocal, ref);
    body.getUnits().add(idStmt);
    Util.addLnPosTags(idStmt.getRightOpBox(), formal.position());
    Util.addLnPosTags(idStmt, formal.position());
}
/**
 * Literal Creation
 *
 * Translates a polyglot literal AST node into the corresponding
 * Jimple constant. char and boolean literals are widened to int
 * constants; class literals are handled by the special helper.
 */
private soot.Value createLiteral(polyglot.ast.Lit lit) {
    if (lit instanceof polyglot.ast.IntLit) {
        polyglot.ast.IntLit intLit = (polyglot.ast.IntLit) lit;
        long raw = intLit.value();
        if (intLit.kind() == polyglot.ast.IntLit.INT) {
            return soot.jimple.IntConstant.v((int) raw);
        }
        return soot.jimple.LongConstant.v(raw);
    }
    if (lit instanceof polyglot.ast.StringLit) {
        return soot.jimple.StringConstant.v(((polyglot.ast.StringLit) lit).value());
    }
    if (lit instanceof polyglot.ast.NullLit) {
        return soot.jimple.NullConstant.v();
    }
    if (lit instanceof polyglot.ast.FloatLit) {
        polyglot.ast.FloatLit floatLit = (polyglot.ast.FloatLit) lit;
        double raw = floatLit.value();
        if (floatLit.kind() == polyglot.ast.FloatLit.DOUBLE) {
            return soot.jimple.DoubleConstant.v(raw);
        }
        return soot.jimple.FloatConstant.v((float) raw);
    }
    if (lit instanceof polyglot.ast.CharLit) {
        // chars are represented as int constants in Jimple
        return soot.jimple.IntConstant.v(((polyglot.ast.CharLit) lit).value());
    }
    if (lit instanceof polyglot.ast.BooleanLit) {
        // booleans are represented as 0/1 int constants in Jimple
        return soot.jimple.IntConstant.v(((polyglot.ast.BooleanLit) lit).value() ? 1 : 0);
    }
    if (lit instanceof polyglot.ast.ClassLit) {
        return getSpecialClassLitLocal((polyglot.ast.ClassLit) lit);
    }
    throw new RuntimeException("Unknown Literal - Unhandled: " + lit.getClass());
}
/**
 * Local Creation
 *
 * For polyglot locals and formals: creates a Jimple local named after
 * the local instance and records the mapping so later references
 * (getLocal) resolve to the same local.
 */
private soot.Local createLocal(polyglot.types.LocalInstance localInst) {
    soot.Type sootType = Util.getSootType(localInst.type());
    soot.Local jimpleLocal = createLocal(localInst.name(), sootType);
    localsMap.put(new polyglot.util.IdentityKey(localInst), jimpleLocal);
    return jimpleLocal;
}
/**
 * For generated locals only: makes a fresh Jimple local of the given
 * name/type and registers it with the current body.
 */
private soot.Local createLocal(String name, soot.Type sootType) {
    soot.Local fresh = soot.jimple.Jimple.v().newLocal(name, sootType);
    body.getLocals().add(fresh);
    return fresh;
}
/**
 * Local Retreival
 *
 * Resolves a polyglot local reference through its local instance.
 */
private soot.Local getLocal(polyglot.ast.Local local) {
    polyglot.types.LocalInstance li = local.localInstance();
    return getLocal(li);
}
/**
 * Local Retreival
 *
 * Resolves a local instance to a Jimple local. Three cases:
 * 1) declared in this method body -> found in localsMap;
 * 2) a value captured by this inner class ("val$name" field on the
 *    declaring class) -> load the field into a fresh local;
 * 3) captured by some enclosing class -> walk the this$0 chain
 *    outward until the val$ field is found, then create and invoke a
 *    synthetic accessor on that outer class.
 */
private soot.Local getLocal(polyglot.types.LocalInstance li) {
if (localsMap.containsKey(new polyglot.util.IdentityKey(li))){
// case 1: a local of this method
soot.Local sootLocal = (soot.Local)localsMap.get(new polyglot.util.IdentityKey(li));
return sootLocal;
}
else if (body.getMethod().getDeclaringClass().declaresField("val$"+li.name(), Util.getSootType(li.type()))){
// case 2: captured value stored on this (inner) class; load it
soot.Local fieldLocal = generateLocal(li.type());
soot.SootFieldRef field = soot.Scene.v().makeFieldRef(body.getMethod().getDeclaringClass(), "val$"+li.name(), Util.getSootType(li.type()), false);
soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(specialThisLocal, field);
soot.jimple.AssignStmt assign = soot.jimple.Jimple.v().newAssignStmt(fieldLocal, fieldRef);
body.getUnits().add(assign);
return fieldLocal;
}
else {
// case 3: create an access method in the outer class for val$fieldname.
// Use the this$0 field to find the type of an outer class - it has
// to have one because local/anon inner classes can't declare static
// members, so for deep nesting we are not in a static context here.
soot.SootClass currentClass = body.getMethod().getDeclaringClass();
boolean fieldFound = false;
while (!fieldFound){
if (!currentClass.declaresFieldByName("this$0")){
throw new RuntimeException("Trying to get field val$"+li.name()+" from some outer class but can't access the outer class of: "+currentClass.getName()+"!"+" current class contains fields: "+currentClass.getFields());
}
soot.SootClass outerClass = ((soot.RefType)currentClass.getFieldByName("this$0").getType()).getSootClass();
// look for field of type li.type and name val$li.name in outer
// class
if (outerClass.declaresField("val$"+li.name(), Util.getSootType(li.type()))){
fieldFound = true;
}
currentClass = outerClass;
// repeat until found in some outer class
}
// create and add accessor to that outer class (indic as current)
soot.SootMethod methToInvoke = makeLiFieldAccessMethod(currentClass, li);
// invoke and return
// generate a local that corresponds to the invoke of that meth
ArrayList methParams = new ArrayList();
methParams.add(getThis(currentClass.getType()));
soot.Local res = Util.getPrivateAccessFieldInvoke(methToInvoke.makeRef(), methParams, body, lg);
return res;
}
}
/**
 * Creates a synthetic static accessor ("access$N00") on classToInvoke
 * that returns its captured "val$name" field; used by getLocal when a
 * captured value lives on an enclosing class. The method body is
 * produced by PrivateFieldAccMethodSource and tagged as synthetic.
 */
private soot.SootMethod makeLiFieldAccessMethod(soot.SootClass classToInvoke, polyglot.types.LocalInstance li){
// counter keeps generated accessor names unique across the resolver
String name = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
ArrayList paramTypes = new ArrayList();
paramTypes.add(classToInvoke.getType());
soot.SootMethod meth = new soot.SootMethod(name, paramTypes, Util.getSootType(li.type()), soot.Modifier.STATIC);
classToInvoke.addMethod(meth);
PrivateFieldAccMethodSource src = new PrivateFieldAccMethodSource(
Util.getSootType(li.type()),
"val$"+li.name(),
false,
classToInvoke
);
meth.setActiveBody(src.getBody(meth, null));
meth.addTag(new soot.tagkit.SyntheticTag());
return meth;
}
/**
 * Stmt creation
 *
 * Dispatches a polyglot statement node to the matching Jimple
 * creation routine. Empty statements emit no units; any statement
 * kind not listed here is a hard error. The instanceof chain order
 * matters where polyglot types are related by subtyping.
 */
protected void createStmt(polyglot.ast.Stmt stmt) {
if (stmt instanceof polyglot.ast.Eval) {
// expression statement: generate the expression for its side effects
base().createExpr(((polyglot.ast.Eval)stmt).expr());
}
else if (stmt instanceof polyglot.ast.If) {
createIf((polyglot.ast.If)stmt);
}
else if (stmt instanceof polyglot.ast.LocalDecl) {
createLocalDecl((polyglot.ast.LocalDecl)stmt);
}
else if (stmt instanceof polyglot.ast.Block) {
createBlock((polyglot.ast.Block)stmt);
}
else if (stmt instanceof polyglot.ast.While) {
createWhile((polyglot.ast.While)stmt);
}
else if (stmt instanceof polyglot.ast.Do) {
createDo((polyglot.ast.Do)stmt);
}
else if (stmt instanceof polyglot.ast.For) {
createForLoop((polyglot.ast.For)stmt);
}
else if (stmt instanceof polyglot.ast.Switch) {
createSwitch((polyglot.ast.Switch)stmt);
}
else if (stmt instanceof polyglot.ast.Return) {
createReturn((polyglot.ast.Return)stmt);
}
else if (stmt instanceof polyglot.ast.Branch) {
createBranch((polyglot.ast.Branch)stmt);
}
else if (stmt instanceof polyglot.ast.ConstructorCall) {
createConstructorCall((polyglot.ast.ConstructorCall)stmt);
}
else if (stmt instanceof polyglot.ast.Empty) {
// do nothing empty stmt
}
else if (stmt instanceof polyglot.ast.Throw) {
createThrow((polyglot.ast.Throw)stmt);
}
else if (stmt instanceof polyglot.ast.Try) {
createTry((polyglot.ast.Try)stmt);
}
else if (stmt instanceof polyglot.ast.Labeled) {
createLabeled((polyglot.ast.Labeled)stmt);
}
else if (stmt instanceof polyglot.ast.Synchronized) {
createSynchronized((polyglot.ast.Synchronized)stmt);
}
else if (stmt instanceof polyglot.ast.Assert) {
createAssert((polyglot.ast.Assert)stmt);
}
else if (stmt instanceof polyglot.ast.LocalClassDecl) {
createLocalClassDecl((polyglot.ast.LocalClassDecl)stmt);
}
else {
throw new RuntimeException("Unhandled Stmt: "+stmt.getClass());
}
}
/**
 * Decides whether an IfStmt must be emitted for this condition.
 * A constant-true condition (IntConstant 1) needs no conditional
 * branch; every other value does.
 */
private boolean needSootIf(soot.Value sootCond) {
    return !((sootCond instanceof soot.jimple.IntConstant)
            && ((soot.jimple.IntConstant) sootCond).value == 1);
}
/**
 * If Stmts Creation - only add line-number tags to if (the other
 * stmts needing tags are created elsewhere
 *
 * Layout emitted:
 *   if (!cond) goto noop1;   // condition is REVERSED so the branch
 *   <consequent>             // skips the consequent when false
 *   goto noop2;
 *   noop1: [<alternative>]
 *   noop2:
 */
private void createIf(polyglot.ast.If ifExpr){
// handle cond
polyglot.ast.Expr condition = ifExpr.cond();
soot.Value sootCond = base().createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
// non-condition value: "== 0" tests the FALSE case (reversed)
sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
// reverse so the IfStmt jumps to the else-part when cond is false
sootCond = reverseCondition((soot.jimple.ConditionExpr)sootCond);
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
// add if
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
if (needIf) {
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
body.getUnits().add(ifStmt);
// add line and pos tags
Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
Util.addLnPosTags(ifStmt, condition.position());
}
// add consequence
polyglot.ast.Stmt consequence = ifExpr.consequent();
createStmt(consequence);
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(goto1);
body.getUnits().add(noop1);
// handle alternative
polyglot.ast.Stmt alternative = ifExpr.alternative();
if (alternative != null){
createStmt(alternative);
}
body.getUnits().add(noop2);
}
/**
 * While Stmts Creation
 *
 * Top-tested loop. Layout emitted:
 *   noop2:                 // loop head
 *   <continue target>      // condControlNoop
 *   if (!cond) goto noop1; // reversed condition -> exit when false
 *   <body>
 *   goto noop2;
 *   <break target>         // endControlNoop
 *   noop1:                 // loop exit
 */
private void createWhile(polyglot.ast.While whileStmt){
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
// these are for break and continue
endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
body.getUnits().add(noop2);
// handle cond: the continue target sits right before the condition test
body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
polyglot.ast.Expr condition = whileStmt.cond();
soot.Value sootCond = base().createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
// reverse so the branch exits the loop when the condition is false
sootCond = reverseCondition((soot.jimple.ConditionExpr)sootCond);
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
if (needIf){
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
body.getUnits().add(ifStmt);
Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
Util.addLnPosTags(ifStmt, condition.position());
}
// (constant-true condition: no if emitted, loop exits only via break)
// handle body, then jump back to the loop head to retest the condition
createStmt(whileStmt.body());
soot.jimple.GotoStmt gotoLoop = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(gotoLoop);
// break target, then the loop-exit nop the reversed condition jumps to
body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
body.getUnits().add(noop1);
}
/**
 * DoWhile Stmts Creation
 *
 * Bottom-tested loop. Layout emitted:
 *   noop1:                 // loop head
 *   <body>
 *   <continue target>      // condControlNoop
 *   [labeled continue target]
 *   if (cond) goto noop1;  // NOT reversed: branch back while true
 *   <break target>         // endControlNoop
 */
private void createDo(polyglot.ast.Do doStmt){
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop1);
// these are for break and continue
endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
// handle body
createStmt(doStmt.body());
// handle cond
body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
// handle label continue: labeled "continue lbl" jumps land here, just
// before the condition is re-evaluated
if ((labelContinueMap != null) && (labelContinueMap.containsKey(lastLabel))){
body.getUnits().add((soot.jimple.Stmt)labelContinueMap.get(lastLabel));
}
polyglot.ast.Expr condition = doStmt.cond();
soot.Value sootCond = base().createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
// "!= 0" branches back while the value is true (condition NOT reversed)
sootCond = soot.jimple.Jimple.v().newNeExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
if (needIf){
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
body.getUnits().add(ifStmt);
Util.addPosTag(ifStmt.getConditionBox(), condition.position());
Util.addLnPosTags(ifStmt, condition.position());
}
else {
// constant-true condition: unconditional jump back to the loop head
soot.jimple.GotoStmt gotoIf = soot.jimple.Jimple.v().newGotoStmt(noop1);
body.getUnits().add(gotoIf);
}
body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
 * For Loop Stmts Creation
 *
 * Layout emitted:
 *   <inits>
 *   noop2:                  // loop head
 *   if (!cond) goto noop1;  // reversed; omitted cond falls through
 *   <body>
 *   <continue target>       // condControlNoop
 *   [labeled continue target]
 *   <iters>
 *   goto noop2;
 *   noop1:                  // loop exit
 *   <break target>          // endControlNoop
 */
private void createForLoop(polyglot.ast.For forStmt){
// these ()are for break and continue
endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
// handle for inits
Iterator initsIt = forStmt.inits().iterator();
while (initsIt.hasNext()){
createStmt((polyglot.ast.Stmt)initsIt.next());
}
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop2);
// handle cond
polyglot.ast.Expr condition = forStmt.cond();
if (condition != null) {
soot.Value sootCond = base().createExpr(condition);
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
}
else {
// reverse so the branch exits the loop when the condition is false
sootCond = reverseCondition((soot.jimple.ConditionExpr)sootCond);
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
if (needIf){
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
// add cond
body.getUnits().add(ifStmt);
// add line and pos tags
Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
Util.addLnPosTags(ifStmt, condition.position());
}
else {
// NOTE(review): constant-true cond emits an unconditional jump to
// noop1 (the exit) here; the loop is then only entered via fallthrough
// semantics of the later noop1 placement - preserved as-is
soot.jimple.GotoStmt gotoIf = soot.jimple.Jimple.v().newGotoStmt(noop1);
body.getUnits().add(gotoIf);
}
}
else {
// "for(;;)": no condition at all - same jump shape as constant-true
soot.jimple.Stmt goto2 = soot.jimple.Jimple.v().newGotoStmt(noop1);
body.getUnits().add(goto2);
}
// handle body
createStmt(forStmt.body());
// handle continue
body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
// handle label continue
if ((labelContinueMap != null) && (labelContinueMap.containsKey(lastLabel))){
body.getUnits().add((soot.jimple.Stmt)labelContinueMap.get(lastLabel));
}
// handle iters (update expressions), then jump back to the loop head
Iterator itersIt = forStmt.iters().iterator();
while (itersIt.hasNext()){
createStmt((polyglot.ast.Stmt)itersIt.next());
}
soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
body.getUnits().add(goto1);
// loop exit, then break target
body.getUnits().add(noop1);
body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
 * Local Decl Creation
 *
 * Declares a Jimple local for the polyglot local and, if there is an
 * initializer, emits the assignment. Array initializers and condition
 * expressions get special handling before assignment. Position tags
 * are computed so the left-op box spans just the variable name.
 */
private void createLocalDecl(polyglot.ast.LocalDecl localDecl) {
String name = localDecl.name();
polyglot.types.LocalInstance localInst = localDecl.localInstance();
soot.Value lhs = createLocal(localInst);
polyglot.ast.Expr expr = localDecl.init();
if (expr != null) {
soot.Value rhs;
if (expr instanceof polyglot.ast.ArrayInit){
// array initializer needs the declared type to build the array
rhs = getArrayInitLocal((polyglot.ast.ArrayInit)expr, localInst.type());
}
else {
rhs = base().createExpr(expr);
}
if (rhs instanceof soot.jimple.ConditionExpr) {
// condition results must be materialized into a 0/1 value
rhs = handleCondBinExpr((soot.jimple.ConditionExpr)rhs);
}
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, localDecl.position());
// this is a special case for position tags
if ( localDecl.position() != null){
// left-op box spans just the declared name (endColumn - name length)
Util.addLnPosTags(stmt.getLeftOpBox(), localDecl.position().line(), localDecl.position().endLine(), localDecl.position().endColumn()-name.length(), localDecl.position().endColumn());
if (expr != null){
Util.addLnPosTags(stmt, localDecl.position().line(), expr.position().endLine(), localDecl.position().column(), expr.position().endColumn());
}
else {
Util.addLnPosTags(stmt, localDecl.position().line(), localDecl.position().endLine(), localDecl.position().column(), localDecl.position().endColumn());
}
}
else {
// no position info available: nothing to tag
}
if (expr != null){
Util.addLnPosTags(stmt.getRightOpBox(), expr.position());
}
}
}
/**
 * Switch Stmts Creation
 *
 * Builds either a LookupSwitchStmt or a TableSwitchStmt (chosen by
 * isLookupSwitch). First pass allocates a nop target per case and
 * records default; targets are then sorted by case value (parallel
 * arrays, bubble sort) since Jimple switch targets must be ordered.
 * Second pass lays the targets and the case bodies out in source
 * order, with the break target (endControlNoop) at the end.
 */
private void createSwitch(polyglot.ast.Switch switchStmt) {
polyglot.ast.Expr value = switchStmt.expr();
soot.Value sootValue = base().createExpr(value);
soot.jimple.Stmt defaultTarget = null;
polyglot.ast.Case [] caseArray = new polyglot.ast.Case[switchStmt.elements().size()];
soot.jimple.Stmt [] targetsArray = new soot.jimple.Stmt[switchStmt.elements().size()];
ArrayList targets = new ArrayList();
HashMap targetsMap = new HashMap();
int counter = 0;
// first pass: one nop target per non-default case; remember default
Iterator it = switchStmt.elements().iterator();
while (it.hasNext()) {
Object next = it.next();
if (next instanceof polyglot.ast.Case) {
soot.jimple.Stmt noop = soot.jimple.Jimple.v().newNopStmt();
if (!((polyglot.ast.Case)next).isDefault()){
targets.add(noop);
caseArray[counter] = (polyglot.ast.Case)next;
targetsArray[counter] = noop;
counter++;
targetsMap.put(next, noop);
}
else {
defaultTarget = noop;
}
}
}
// sort targets map (bubble sort over the parallel case/target arrays,
// ascending by case value; case counts are small so O(n^2) is fine)
int lowIndex = 0;
int highIndex = 0;
for (int i = 0; i < counter; i++) {
for (int j = i+1; j < counter; j++) {
if (caseArray[j].value() < caseArray[i].value()) {
polyglot.ast.Case tempCase = caseArray[i];
soot.jimple.Stmt tempTarget = targetsArray[i];
caseArray[i] = caseArray[j];
targetsArray[i] = targetsArray[j];
caseArray[j] = tempCase;
targetsArray[j] = tempTarget;
}
}
}
ArrayList sortedTargets = new ArrayList();
for (int i = 0; i < counter; i++) {
sortedTargets.add(targetsArray[i]);
}
// deal with default: if absent, synthesize a target placed after all
// case bodies (falls through to the end of the switch)
boolean hasDefaultTarget = true;
if (defaultTarget == null) {
soot.jimple.Stmt noop = soot.jimple.Jimple.v().newNopStmt();
defaultTarget = noop;
hasDefaultTarget = false;
}
// lookup or tableswitch
soot.jimple.Stmt sootSwitchStmt;
if (isLookupSwitch(switchStmt)) {
// sparse case values: lookupswitch with explicit key list
ArrayList values = new ArrayList();
for (int i = 0; i < counter; i++) {
if (!caseArray[i].isDefault()) {
values.add(soot.jimple.IntConstant.v((int)caseArray[i].value()));
}
}
soot.jimple.LookupSwitchStmt lookupStmt = soot.jimple.Jimple.v().newLookupSwitchStmt(sootValue, values, sortedTargets, defaultTarget);
Util.addLnPosTags(lookupStmt.getKeyBox(), value.position());
sootSwitchStmt = lookupStmt;
}
else {
// dense case values: tableswitch over [lowVal, highVal]
long lowVal = 0;
long highVal = 0;
boolean unknown = true;
it = switchStmt.elements().iterator();
while (it.hasNext()){
Object next = it.next();
if (next instanceof polyglot.ast.Case) {
if (!((polyglot.ast.Case)next).isDefault()){
long temp = ((polyglot.ast.Case)next).value();
if (unknown){
highVal = temp;
lowVal = temp;
unknown = false;
}
if (temp > highVal) {
highVal = temp;
}
if (temp < lowVal) {
lowVal = temp;
}
}
}
}
soot.jimple.TableSwitchStmt tableStmt = soot.jimple.Jimple.v().newTableSwitchStmt(sootValue, (int)lowVal, (int)highVal, sortedTargets, defaultTarget);
Util.addLnPosTags(tableStmt.getKeyBox(), value.position());
sootSwitchStmt = tableStmt;
}
body.getUnits().add(sootSwitchStmt);
Util.addLnPosTags(sootSwitchStmt, switchStmt.position());
// break statements inside the switch jump to this end nop
endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
// second pass: lay out case targets and their bodies in source order
it = switchStmt.elements().iterator();
Iterator targetsIt = targets.iterator();
while (it.hasNext()){
Object next = it.next();
if (next instanceof polyglot.ast.Case) {
if (!((polyglot.ast.Case)next).isDefault()){
body.getUnits().add((soot.jimple.Stmt)targetsMap.get(next));
}
else {
body.getUnits().add(defaultTarget);
}
}
else {
polyglot.ast.SwitchBlock blockStmt = (polyglot.ast.SwitchBlock)next;
createBlock(blockStmt);
}
}
if (!hasDefaultTarget) {
// synthesized default lands here, past every case body
body.getUnits().add(defaultTarget);
}
body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
 * Determine if switch should be lookup or table - this doesn't
 * always get the same result as javac
 * lookup: non-table
 * table: sequential (no gaps)
 *
 * A switch whose non-default case values form a gap-free range
 * becomes a tableswitch (returns false); anything else becomes a
 * lookupswitch (returns true).
 */
private boolean isLookupSwitch(polyglot.ast.Switch switchStmt) {
    int min = 0;
    int max = 0;
    int numCases = 0;
    Iterator elemIt = switchStmt.elements().iterator();
    while (elemIt.hasNext()) {
        Object elem = elemIt.next();
        if (!(elem instanceof polyglot.ast.Case)) {
            continue;
        }
        polyglot.ast.Case caseStmt = (polyglot.ast.Case) elem;
        if (caseStmt.isDefault()) {
            continue;
        }
        int caseValue = (int) caseStmt.value();
        // first case seeds both bounds
        if (numCases == 0 || caseValue <= min) {
            min = caseValue;
        }
        if (numCases == 0 || caseValue >= max) {
            max = caseValue;
        }
        numCases++;
    }
    // gap-free ascending range <=> span equals case count - 1
    return (numCases - 1) != (max - min);
}
/**
 * Branch Stmts Creation
 *
 * Emits the goto for a break/continue. Unlabeled breaks jump to the
 * innermost endControlNoop, unlabeled continues to the innermost
 * condControlNoop (each peeked via pop/push so the stack is left
 * unchanged). Labeled branches resolve through labelBreakMap /
 * labelContinueMap populated by createLabeled.
 */
private void createBranch(polyglot.ast.Branch branchStmt){
// marker nop so the branch has a preceding unit to anchor to
body.getUnits().add(soot.jimple.Jimple.v().newNopStmt());
if (branchStmt.kind() == polyglot.ast.Branch.BREAK){
if (branchStmt.label() == null) {
// pop to read the innermost break target, then push it right back
soot.jimple.Stmt gotoEndNoop = (soot.jimple.Stmt)endControlNoop.pop();
soot.jimple.Stmt gotoEnd = soot.jimple.Jimple.v().newGotoStmt(gotoEndNoop);
endControlNoop.push(gotoEndNoop);
body.getUnits().add(gotoEnd);
Util.addLnPosTags(gotoEnd, branchStmt.position());
}
else {
soot.jimple.Stmt gotoLabel = soot.jimple.Jimple.v().newGotoStmt((soot.jimple.Stmt)labelBreakMap.get(branchStmt.label()));
body.getUnits().add(gotoLabel);
Util.addLnPosTags(gotoLabel, branchStmt.position());
}
}
else if (branchStmt.kind() == polyglot.ast.Branch.CONTINUE){
if (branchStmt.label() == null) {
// pop to read the innermost continue target, then push it right back
soot.jimple.Stmt gotoCondNoop = (soot.jimple.Stmt)condControlNoop.pop();
soot.jimple.Stmt gotoCond = soot.jimple.Jimple.v().newGotoStmt(gotoCondNoop);
condControlNoop.push(gotoCondNoop);
body.getUnits().add(gotoCond);
Util.addLnPosTags(gotoCond, branchStmt.position());
}
else {
soot.jimple.Stmt gotoLabel = soot.jimple.Jimple.v().newGotoStmt((soot.jimple.Stmt)labelContinueMap.get(branchStmt.label()));
body.getUnits().add(gotoLabel);
Util.addLnPosTags(gotoLabel, branchStmt.position());
}
}
}
/**
 * Labeled Stmt Creation
 *
 * Registers two nops for the label - a continue target (placed at the
 * start of the labeled stmt, except for For/Do loops which place it
 * themselves via lastLabel/labelContinueMap) and a break target
 * (placed after the stmt) - then emits the labeled stmt itself.
 * The maps are created lazily on first use.
 */
private void createLabeled(polyglot.ast.Labeled labeledStmt){
String label = labeledStmt.label();
// lastLabel lets For/Do loop creation find this label's continue nop
lastLabel = label;
polyglot.ast.Stmt stmt = labeledStmt.statement();
soot.jimple.Stmt noop = soot.jimple.Jimple.v().newNopStmt();
// For/Do loops insert the continue target at their own cond position
if (!(stmt instanceof polyglot.ast.For) && !(stmt instanceof polyglot.ast.Do)){
body.getUnits().add(noop);
}
if (labelBreakMap == null) {
labelBreakMap = new HashMap();
}
if (labelContinueMap == null) {
labelContinueMap = new HashMap();
}
labelContinueMap.put(label, noop);
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
labelBreakMap.put(label, noop2);
createStmt(stmt);
body.getUnits().add(noop2);
// the idea here is to make a map of labels to the first
// jimple stmt of the stmt (a noop) to be created - so
// there is something to look up for breaks and continues
// with labels
}
/**
 * Assert Stmt Creation
 *
 * Emits the standard javac lowering of assert:
 *   if ($assertionsDisabled != 0) goto end;
 *   if (cond) goto end;
 *   throw new AssertionError([message]);
 *   end:
 * The AssertionError constructor overload is chosen from the static
 * type of the message expression (primitives keep their own ctor,
 * everything else uses the Object overload).
 */
private void createAssert(polyglot.ast.Assert assertStmt) {
// check if assertions are disabled
soot.Local testLocal = lg.generateLocal(soot.BooleanType.v());
soot.SootFieldRef assertField = soot.Scene.v().makeFieldRef(body.getMethod().getDeclaringClass(), "$assertionsDisabled", soot.BooleanType.v(), true);
soot.jimple.FieldRef assertFieldRef = soot.jimple.Jimple.v().newStaticFieldRef(assertField);
soot.jimple.AssignStmt fieldAssign = soot.jimple.Jimple.v().newAssignStmt(testLocal, assertFieldRef);
body.getUnits().add(fieldAssign);
soot.jimple.NopStmt nop1 = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.ConditionExpr cond1 = soot.jimple.Jimple.v().newNeExpr(testLocal, soot.jimple.IntConstant.v(0));
soot.jimple.IfStmt testIf = soot.jimple.Jimple.v().newIfStmt(cond1, nop1);
body.getUnits().add(testIf);
// actual cond test; "assert false" skips the test entirely so the
// failure code is reached unconditionally
if ((assertStmt.cond() instanceof polyglot.ast.BooleanLit) && (!((polyglot.ast.BooleanLit)assertStmt.cond()).value())){
// don't makeif
}
else {
soot.Value sootCond = base().createExpr(assertStmt.cond());
boolean needIf = needSootIf(sootCond);
if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
// "== 1" jumps past the failure code when the assertion holds
sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(1));
}
else {
sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
}
if (needIf){
// add if
soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, nop1);
body.getUnits().add(ifStmt);
Util.addLnPosTags(ifStmt.getConditionBox(), assertStmt.cond().position());
Util.addLnPosTags(ifStmt, assertStmt.position());
}
}
// assertion failure code: new AssertionError(...), then throw
soot.Local failureLocal = lg.generateLocal(soot.RefType.v("java.lang.AssertionError"));
soot.jimple.NewExpr newExpr = soot.jimple.Jimple.v().newNewExpr(soot.RefType.v("java.lang.AssertionError"));
soot.jimple.AssignStmt newAssign = soot.jimple.Jimple.v().newAssignStmt(failureLocal, newExpr);
body.getUnits().add(newAssign);
soot.SootMethodRef methToInvoke;
ArrayList paramTypes = new ArrayList();
ArrayList params = new ArrayList();
if (assertStmt.errorMessage() != null){
soot.Value errorExpr = base().createExpr(assertStmt.errorMessage());
if (errorExpr instanceof soot.jimple.ConditionExpr) {
errorExpr = handleCondBinExpr((soot.jimple.ConditionExpr)errorExpr);
}
soot.Type errorType = errorExpr.getType();
// char message needs the char ctor even though its Jimple type is int
if (assertStmt.errorMessage().type().isChar()){
errorType = soot.CharType.v();
}
if (errorType instanceof soot.IntType) {
paramTypes.add(soot.IntType.v());
}
else if (errorType instanceof soot.LongType){
paramTypes.add(soot.LongType.v());
}
else if (errorType instanceof soot.FloatType){
paramTypes.add(soot.FloatType.v());
}
else if (errorType instanceof soot.DoubleType){
paramTypes.add(soot.DoubleType.v());
}
else if (errorType instanceof soot.CharType){
paramTypes.add(soot.CharType.v());
}
else if (errorType instanceof soot.BooleanType){
paramTypes.add(soot.BooleanType.v());
}
else if (errorType instanceof soot.ShortType){
// short/byte widen to the int ctor (no short/byte overloads exist)
paramTypes.add(soot.IntType.v());
}
else if (errorType instanceof soot.ByteType){
paramTypes.add(soot.IntType.v());
}
else {
paramTypes.add(soot.Scene.v().getSootClass("java.lang.Object").getType());
}
params.add(errorExpr);
}
methToInvoke = soot.Scene.v().makeMethodRef( soot.Scene.v().getSootClass("java.lang.AssertionError"), "<init>", paramTypes, soot.VoidType.v(), false);
soot.jimple.SpecialInvokeExpr invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(failureLocal, methToInvoke, params);
soot.jimple.InvokeStmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(invokeExpr);
body.getUnits().add(invokeStmt);
if (assertStmt.errorMessage() != null){
Util.addLnPosTags(invokeExpr.getArgBox(0), assertStmt.errorMessage().position());
}
soot.jimple.ThrowStmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(failureLocal);
body.getUnits().add(throwStmt);
// end
body.getUnits().add(nop1);
}
/**
 * Synchronized Stmt Creation
 *
 * Emits entermonitor / body / exitmonitor, plus a catch-all handler
 * that exits the monitor and rethrows so the lock is released on any
 * exception. The monitor operand is pushed on monitorStack so that
 * returns inside the body can emit matching exitmonitors. Two trap
 * ranges are registered: one over the body and one over the handler's
 * own exitmonitor (so a failure there re-enters the handler).
 */
private void createSynchronized(polyglot.ast.Synchronized synchStmt) {
soot.Value sootExpr = base().createExpr(synchStmt.expr());
soot.jimple.EnterMonitorStmt enterMon = soot.jimple.Jimple.v().newEnterMonitorStmt(sootExpr);
body.getUnits().add(enterMon);
// lazily created: records monitors held, for exits before returns
if (monitorStack == null){
monitorStack = new Stack();
}
monitorStack.push(sootExpr);
Util.addLnPosTags(enterMon.getOpBox(), synchStmt.expr().position());
Util.addLnPosTags(enterMon, synchStmt.expr().position());
soot.jimple.Stmt startNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(startNoop);
createBlock(synchStmt.body());
soot.jimple.ExitMonitorStmt exitMon = soot.jimple.Jimple.v().newExitMonitorStmt(sootExpr);
body.getUnits().add(exitMon);
monitorStack.pop();
Util.addLnPosTags(exitMon.getOpBox(), synchStmt.expr().position());
Util.addLnPosTags(exitMon, synchStmt.expr().position());
soot.jimple.Stmt endSynchNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt gotoEnd = soot.jimple.Jimple.v().newGotoStmt(endSynchNoop);
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(endNoop);
// normal completion jumps over the exception handler below
body.getUnits().add(gotoEnd);
soot.jimple.Stmt catchAllBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchAllBeforeNoop);
// catch all: bind the caught Throwable
soot.Local formalLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
soot.jimple.CaughtExceptionRef exceptRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(formalLocal, exceptRef);
body.getUnits().add(stmt);
// catch: copy the exception, release the monitor, rethrow
soot.jimple.Stmt catchBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchBeforeNoop);
soot.Local local = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(local, formalLocal);
body.getUnits().add(assign);
soot.jimple.ExitMonitorStmt catchExitMon = soot.jimple.Jimple.v().newExitMonitorStmt(sootExpr);
body.getUnits().add(catchExitMon);
Util.addLnPosTags(catchExitMon.getOpBox(), synchStmt.expr().position());
soot.jimple.Stmt catchAfterNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchAfterNoop);
// throw
soot.jimple.Stmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(local);
body.getUnits().add(throwStmt);
body.getUnits().add(endSynchNoop);
// trap over the body, and over the handler's own exitmonitor
addToExceptionList(startNoop, endNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
addToExceptionList(catchBeforeNoop, catchAfterNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
}
/**
 * Return Stmts Creation
 *
 * Translates a polyglot return statement into jimple. Before the actual
 * return is emitted: any monitors held via synchronized blocks are exited
 * (the monitor stack is restored afterwards for code following this
 * return), and any enclosing try/catch finally blocks are inlined. If an
 * inlined finally block itself contains a return, no further return stmt
 * is generated here.
 */
private void createReturn(polyglot.ast.Return retStmt) {
    polyglot.ast.Expr expr = retStmt.expr();
    // evaluate the returned expression first, before monitor exits and
    // finally inlining, to preserve source evaluation order
    soot.Value sootLocal = null;
    if (expr != null){
        sootLocal = base().createExpr(expr);
    }
    // handle monitor exits before return if necessary
    if (monitorStack != null){
        Stack putBack = new Stack();
        while (!monitorStack.isEmpty()){
            soot.Local exitVal = (soot.Local)monitorStack.pop();
            putBack.push(exitVal);
            soot.jimple.ExitMonitorStmt emStmt = soot.jimple.Jimple.v().newExitMonitorStmt(exitVal);
            body.getUnits().add(emStmt);
        }
        // restore the monitor stack for code that follows this return
        while(!putBack.isEmpty()){
            monitorStack.push(putBack.pop());
        }
    }
    // handle finally blocks before return if inside try block
    if (inlineFinallyBeforeReturn(tryStack)){
        return;
    }
    // handle finally blocks before return if inside catch block
    if (inlineFinallyBeforeReturn(catchStack)){
        return;
    }
    // return
    if (expr == null) {
        soot.jimple.Stmt retStmtVoid = soot.jimple.Jimple.v().newReturnVoidStmt();
        body.getUnits().add(retStmtVoid);
        Util.addLnPosTags(retStmtVoid, retStmt.position());
    }
    else {
        // condition expressions must be materialized as a 0/1 local
        if (sootLocal instanceof soot.jimple.ConditionExpr) {
            sootLocal = handleCondBinExpr((soot.jimple.ConditionExpr)sootLocal);
        }
        soot.jimple.ReturnStmt retStmtLocal = soot.jimple.Jimple.v().newReturnStmt(sootLocal);
        body.getUnits().add(retStmtLocal);
        Util.addLnPosTags(retStmtLocal.getOpBox(), expr.position());
        Util.addLnPosTags(retStmtLocal, retStmt.position());
    }
}
/**
 * If the top of the given try/catch context stack has a finally block,
 * inlines that block ahead of an upcoming return stmt. The stack entry
 * is restored afterwards.
 *
 * @param contextStack tryStack or catchStack (may be null or empty)
 * @return true when the inlined finally itself contains a return stmt,
 *         in which case the caller must not emit its own return
 */
private boolean inlineFinallyBeforeReturn(Stack contextStack) {
    if (contextStack == null || contextStack.isEmpty()){
        return false;
    }
    polyglot.ast.Try currentTry = (polyglot.ast.Try)contextStack.pop();
    if (currentTry.finallyBlock() == null){
        contextStack.push(currentTry);
        return false;
    }
    createBlock(currentTry.finallyBlock());
    contextStack.push(currentTry);
    // if the finally block contains a return don't create the other return
    ReturnStmtChecker rsc = new ReturnStmtChecker();
    currentTry.finallyBlock().visit(rsc);
    return rsc.hasRet();
}
/**
 * Throw Stmt Creation
 *
 * Evaluates the thrown expression and emits a jimple throw stmt,
 * tagging both the stmt and its operand box with source positions.
 */
private void createThrow(polyglot.ast.Throw throwStmt){
    soot.Value thrownValue = base().createExpr(throwStmt.expr());
    soot.jimple.ThrowStmt jimpleThrow = soot.jimple.Jimple.v().newThrowStmt(thrownValue);
    body.getUnits().add(jimpleThrow);
    Util.addLnPosTags(jimpleThrow, throwStmt.position());
    Util.addLnPosTags(jimpleThrow.getOpBox(), throwStmt.expr().position());
}
/**
 * Try Stmt Creation
 *
 * Dispatches on the presence of a finally block: the finally case is
 * handled by a separate routine for simplicity.
 */
private void createTry(polyglot.ast.Try tryStmt) {
    if (tryStmt.finallyBlock() != null) {
        createTryCatchFinally(tryStmt);
    }
    else {
        createTryCatch(tryStmt);
    }
}
/**
 * handles try/catch (try/catch/finally is separate for simplicity)
 *
 * Layout emitted: [noop1] try-body [noop2] goto end, then per catch:
 * [noop3] identity-stmt for the formal, handler body, goto end.
 * One trap entry per catch maps the protected range [noop1, noop2)
 * to handler entry noop3.
 */
private void createTryCatch(polyglot.ast.Try tryStmt){
    // try
    polyglot.ast.Block tryBlock = tryStmt.tryBlock();
    // this nop is for the fromStmt of try
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(noop1);
    // record on tryStack so nested returns know to inline finallys etc.
    if (tryStack == null){
        tryStack = new Stack();
    }
    tryStack.push(tryStmt);
    createBlock(tryBlock);
    tryStack.pop();
    // this nop is for the toStmt of try
    soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(noop2);
    // create end nop for after entire try/catch
    soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt tryEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
    body.getUnits().add(tryEndGoto);
    Iterator it = tryStmt.catchBlocks().iterator();
    while (it.hasNext()) {
        // noop3 marks the handler entry point referenced by the trap
        soot.jimple.Stmt noop3 = soot.jimple.Jimple.v().newNopStmt();
        body.getUnits().add(noop3);
        // create catch stmts
        polyglot.ast.Catch catchBlock = (polyglot.ast.Catch)it.next();
        // create catch ref
        createCatchFormal(catchBlock.formal());
        if (catchStack == null){
            catchStack = new Stack();
        }
        catchStack.push(tryStmt);
        createBlock(catchBlock.body());
        catchStack.pop();
        // skip the remaining handlers after this handler completes
        soot.jimple.Stmt catchEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
        body.getUnits().add(catchEndGoto);
        soot.Type sootType = Util.getSootType(catchBlock.catchType());
        addToExceptionList(noop1, noop2, noop3, soot.Scene.v().getSootClass(sootType.toString()));
    }
    body.getUnits().add(endNoop);
}
/**
 * handles try/catch/finally (try/catch is separate for simplicity)
 *
 * The finally block is inlined once per exit path (normal try exit,
 * each catch exit, and the catch-all rethrow path). gotoMap records,
 * for each inlined-finally entry nop, the stmt to jump back to after
 * the finally code runs.
 */
private void createTryCatchFinally(polyglot.ast.Try tryStmt){
    HashMap gotoMap = new HashMap();
    // try
    polyglot.ast.Block tryBlock = tryStmt.tryBlock();
    // this nop is for the fromStmt of try
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(noop1);
    if (tryStack == null){
        tryStack = new Stack();
    }
    tryStack.push(tryStmt);
    createBlock(tryBlock);
    tryStack.pop();
    // this nop is for the toStmt of try
    soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(noop2);
    // create end nop for after entire try/catch
    soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
    // to finally: normal try completion jumps to its finally copy
    soot.jimple.Stmt tryGotoFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(tryGotoFinallyNoop);
    soot.jimple.Stmt tryFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt tryGotoFinally = soot.jimple.Jimple.v().newGotoStmt(tryFinallyNoop);
    body.getUnits().add(tryGotoFinally);
    // goto end stmts: where the try's finally copy jumps back to
    soot.jimple.Stmt beforeEndGotoNoop = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(beforeEndGotoNoop);
    soot.jimple.Stmt tryEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
    body.getUnits().add(tryEndGoto);
    gotoMap.put(tryFinallyNoop, beforeEndGotoNoop);
    // catch section
    soot.jimple.Stmt catchAllBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
    Iterator it = tryStmt.catchBlocks().iterator();
    while (it.hasNext()) {
        soot.jimple.Stmt noop3 = soot.jimple.Jimple.v().newNopStmt();
        body.getUnits().add(noop3);
        // create catch stmts
        polyglot.ast.Catch catchBlock = (polyglot.ast.Catch)it.next();
        // create catch ref
        soot.jimple.Stmt catchRefNoop = soot.jimple.Jimple.v().newNopStmt();
        body.getUnits().add(catchRefNoop);
        createCatchFormal(catchBlock.formal());
        soot.jimple.Stmt catchStmtsNoop = soot.jimple.Jimple.v().newNopStmt();
        body.getUnits().add(catchStmtsNoop);
        if (catchStack == null){
            catchStack = new Stack();
        }
        catchStack.push(tryStmt);
        createBlock(catchBlock.body());
        catchStack.pop();
        // to finally: this catch's normal completion jumps to its finally copy
        soot.jimple.Stmt catchGotoFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
        body.getUnits().add(catchGotoFinallyNoop);
        soot.jimple.Stmt catchFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
        soot.jimple.Stmt catchGotoFinally = soot.jimple.Jimple.v().newGotoStmt(catchFinallyNoop);
        body.getUnits().add(catchGotoFinally);
        // goto end stmts
        soot.jimple.Stmt beforeCatchEndGotoNoop = soot.jimple.Jimple.v().newNopStmt();
        body.getUnits().add(beforeCatchEndGotoNoop);
        soot.jimple.Stmt catchEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
        body.getUnits().add(catchEndGoto);
        gotoMap.put(catchFinallyNoop, beforeCatchEndGotoNoop);
        soot.Type sootType = Util.getSootType(catchBlock.catchType());
        addToExceptionList(noop1, noop2, noop3, soot.Scene.v().getSootClass(sootType.toString()));
        // the handler body itself is protected by the catch-all trap too
        addToExceptionList(catchStmtsNoop, beforeCatchEndGotoNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
    }
    // catch all ref: identity stmt binding the caught Throwable
    soot.Local formalLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
    body.getUnits().add(catchAllBeforeNoop);
    soot.jimple.CaughtExceptionRef exceptRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
    soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(formalLocal, exceptRef);
    body.getUnits().add(stmt);
    // catch all assign: copy into a local so it survives the finally code
    soot.jimple.Stmt beforeCatchAllAssignNoop = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(beforeCatchAllAssignNoop);
    soot.Local catchAllAssignLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
    soot.jimple.Stmt catchAllAssign = soot.jimple.Jimple.v().newAssignStmt(catchAllAssignLocal, formalLocal);
    body.getUnits().add(catchAllAssign);
    // catch all finally
    soot.jimple.Stmt catchAllFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt catchAllGotoFinally = soot.jimple.Jimple.v().newGotoStmt(catchAllFinallyNoop);
    body.getUnits().add(catchAllGotoFinally);
    // catch all throw: rethrow after the finally copy ran
    soot.jimple.Stmt catchAllBeforeThrowNoop = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(catchAllBeforeThrowNoop);
    soot.jimple.Stmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(catchAllAssignLocal);
    // mark the rethrow as compiler-generated
    throwStmt.addTag(new soot.tagkit.ThrowCreatedByCompilerTag());
    body.getUnits().add(throwStmt);
    gotoMap.put(catchAllFinallyNoop, catchAllBeforeThrowNoop);
    // catch all goto end
    soot.jimple.Stmt catchAllGotoEnd = soot.jimple.Jimple.v().newGotoStmt(endNoop);
    body.getUnits().add(catchAllGotoEnd);
    addToExceptionList(beforeCatchAllAssignNoop, catchAllBeforeThrowNoop ,catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
    // create finally's: inline one copy of the finally block per recorded
    // entry nop, each followed by a goto back to its continuation stmt
    Iterator finallyIt = gotoMap.keySet().iterator();
    while (finallyIt.hasNext()) {
        soot.jimple.Stmt noopStmt = (soot.jimple.Stmt)finallyIt.next();
        body.getUnits().add(noopStmt);
        createBlock(tryStmt.finallyBlock());
        soot.jimple.Stmt backToStmt = (soot.jimple.Stmt)gotoMap.get(noopStmt);
        soot.jimple.Stmt backToGoto = soot.jimple.Jimple.v().newGotoStmt(backToStmt);
        body.getUnits().add(backToGoto);
    }
    body.getUnits().add(endNoop);
    addToExceptionList(noop1, beforeEndGotoNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
}
/**
 * add exceptions to a list that gets added at end of method
 *
 * Records a trap entry covering [from, to) handled at 'with' for the
 * given exception class; the table is created lazily on first use.
 */
private void addToExceptionList(soot.jimple.Stmt from, soot.jimple.Stmt to, soot.jimple.Stmt with, soot.SootClass exceptionClass) {
    if (exceptionTable == null) {
        exceptionTable = new ArrayList();
    }
    exceptionTable.add(soot.jimple.Jimple.v().newTrap(exceptionClass, from, to, with));
}
/**
 * Converts an expression's compile-time constant value into a jimple
 * constant of the matching kind.
 */
public soot.jimple.Constant createConstant(polyglot.ast.Expr expr){
    return getConstant(expr.constantValue(), expr.type());
}
/**
 * Expression Creation
 *
 * Central dispatcher that converts a polyglot expression AST node into a
 * soot.Value, delegating to the specialized create*/get*Local helpers.
 * Compile-time constants are short-circuited to jimple constants, except
 * String-typed binary expressions, which must still generate real
 * StringBuffer concatenation code.
 *
 * @param expr the polyglot expression to translate
 * @return the soot value (usually a fresh local) holding the result
 * @throws RuntimeException for expression kinds with no handler
 */
protected soot.Value createExpr(polyglot.ast.Expr expr){
    // inline compile-time constants instead of evaluating them
    if (expr.isConstant() && expr.constantValue() != null && expr.type() != null && !(expr instanceof polyglot.ast.Binary && expr.type().toString().equals("java.lang.String")) ){
        return createConstant(expr);
    }
    if (expr instanceof polyglot.ast.Assign) {
        return getAssignLocal((polyglot.ast.Assign)expr);
    }
    else if (expr instanceof polyglot.ast.Lit) {
        return createLiteral((polyglot.ast.Lit)expr);
    }
    else if (expr instanceof polyglot.ast.Local) {
        return getLocal((polyglot.ast.Local)expr);
    }
    else if (expr instanceof polyglot.ast.Binary) {
        return getBinaryLocal((polyglot.ast.Binary)expr);
    }
    else if (expr instanceof polyglot.ast.Unary) {
        return getUnaryLocal((polyglot.ast.Unary)expr);
    }
    else if (expr instanceof polyglot.ast.Cast) {
        return getCastLocal((polyglot.ast.Cast)expr);
    }
    // NOTE: polyglot.ast.ArrayInit is deliberately absent here —
    // array initializers are special and get created elsewhere
    else if (expr instanceof polyglot.ast.ArrayAccess) {
        return getArrayRefLocal((polyglot.ast.ArrayAccess)expr);
    }
    else if (expr instanceof polyglot.ast.NewArray) {
        return getNewArrayLocal((polyglot.ast.NewArray)expr);
    }
    else if (expr instanceof polyglot.ast.Call) {
        return getCallLocal((polyglot.ast.Call)expr);
    }
    else if (expr instanceof polyglot.ast.New) {
        return getNewLocal((polyglot.ast.New)expr);
    }
    else if (expr instanceof polyglot.ast.Special) {
        return getSpecialLocal((polyglot.ast.Special)expr);
    }
    else if (expr instanceof polyglot.ast.Instanceof) {
        return getInstanceOfLocal((polyglot.ast.Instanceof)expr);
    }
    else if (expr instanceof polyglot.ast.Conditional) {
        return getConditionalLocal((polyglot.ast.Conditional)expr);
    }
    else if (expr instanceof polyglot.ast.Field) {
        return getFieldLocal((polyglot.ast.Field)expr);
    }
    else {
        throw new RuntimeException("Unhandled Expression: "+expr);
    }
}
/**
 * Translates ++/-- applied to a field that needs a synthetic accessor
 * (private/protected field of another class). Reads the field via the
 * get-accessor, applies +1/-1, writes back via the set-accessor.
 * Pre ops yield the updated value; post ops yield the original.
 */
protected soot.Local handlePrivateFieldUnarySet(polyglot.ast.Unary unary){
    polyglot.ast.Field targetField = (polyglot.ast.Field)unary.expr();
    soot.Value receiver = base().getBaseLocal(targetField.target());
    soot.Value fetched = getPrivateAccessFieldLocal(targetField, receiver);
    // snapshot the current field value
    soot.Local oldVal = generateLocal(targetField.type());
    soot.jimple.AssignStmt readStmt = soot.jimple.Jimple.v().newAssignStmt(oldVal, fetched);
    body.getUnits().add(readStmt);
    Util.addLnPosTags(readStmt, unary.position());
    soot.Value one = base().getConstant(Util.getSootType(targetField.type()), 1);
    boolean isIncrement = (unary.operator() == polyglot.ast.Unary.PRE_INC)
            || (unary.operator() == polyglot.ast.Unary.POST_INC);
    soot.jimple.BinopExpr updateExpr = isIncrement
            ? soot.jimple.Jimple.v().newAddExpr(oldVal, one)
            : soot.jimple.Jimple.v().newSubExpr(oldVal, one);
    soot.Local newVal = generateLocal(targetField.type());
    soot.jimple.AssignStmt computeStmt = soot.jimple.Jimple.v().newAssignStmt(newVal, updateExpr);
    body.getUnits().add(computeStmt);
    boolean isPre = (unary.operator() == polyglot.ast.Unary.PRE_INC)
            || (unary.operator() == polyglot.ast.Unary.PRE_DEC);
    if (isPre){
        // pre-inc/dec: the expression value is the updated field value
        return base().handlePrivateFieldSet(targetField, newVal, receiver);
    }
    // post-inc/dec: store the update but yield the original value
    base().handlePrivateFieldSet(targetField, newVal, receiver);
    return oldVal;
}
/**
 * Translates an assignment whose lhs is a field requiring a synthetic
 * accessor. Computes the rhs value (plain, string-concat, or compound)
 * and stores it through the set-accessor.
 *
 * The lhs is always a Field here — callers check before dispatching.
 */
protected soot.Local handlePrivateFieldAssignSet(polyglot.ast.Assign assign){
    polyglot.ast.Field fLeft = (polyglot.ast.Field)assign.left();
    soot.Value fieldBase = base().getBaseLocal(fLeft.target());
    soot.Value rhsValue;
    if (assign.operator() == polyglot.ast.Assign.ASSIGN){
        // plain assignment: evaluate the rhs directly
        rhsValue = base().getSimpleAssignRightLocal(assign);
    }
    else if ((assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) && assign.type().toString().equals("java.lang.String")){
        // String += becomes StringBuffer concatenation
        rhsValue = getStringConcatAssignRightLocal(assign);
    }
    else {
        // compound op: read the current value through the get-accessor,
        // then combine it with the rhs
        soot.Local currentVal = getPrivateAccessFieldLocal(fLeft, fieldBase);
        rhsValue = base().getAssignRightLocal(assign, currentVal);
    }
    return handlePrivateFieldSet(fLeft, rhsValue, fieldBase);
}
/**
 * Stores a value into a field via its synthetic static set-accessor and
 * returns a local holding the stored value (the assignment's result).
 *
 * The expr param is typed Expr only for extensibility: in plain
 * java-to-jimple it is always a Field (checked before the call).
 */
protected soot.Local handlePrivateFieldSet(polyglot.ast.Expr expr, soot.Value right, soot.Value base){
    polyglot.ast.Field targetField = (polyglot.ast.Field)expr;
    soot.SootClass container = ((soot.RefType)Util.getSootType(targetField.target().type())).getSootClass();
    soot.SootMethod accessor = addSetAccessMeth(container, targetField, right);
    ArrayList args = new ArrayList();
    if (!targetField.flags().isStatic()){
        // the receiver is passed explicitly since the accessor is static
        args.add(base);
    }
    args.add(right);
    soot.jimple.InvokeExpr call = soot.jimple.Jimple.v().newStaticInvokeExpr(accessor.makeRef(), args);
    soot.Local resultLocal = lg.generateLocal(right.getType());
    soot.jimple.AssignStmt store = soot.jimple.Jimple.v().newAssignStmt(resultLocal, call);
    body.getUnits().add(store);
    return resultLocal;
}
/**
 * Returns (creating, registering and caching if necessary) the synthetic
 * static "access$NNN00" method on conClass that sets the given field.
 * Non-static fields take the receiver as an extra leading parameter;
 * the accessor returns the assigned value so the assignment expression
 * has a result.
 *
 * @param conClass the class that owns the field (accessor is added there)
 * @param field the field to generate a setter accessor for
 * @param param the value being assigned (supplies the parameter type)
 * @return the (possibly pre-existing) accessor method
 */
private soot.SootMethod addSetAccessMeth(soot.SootClass conClass, polyglot.ast.Field field, soot.Value param){
    // reuse a previously generated accessor for this exact field instance
    if ((InitialResolver.v().getPrivateFieldSetAccessMap() != null) && (InitialResolver.v().getPrivateFieldSetAccessMap().containsKey(new polyglot.util.IdentityKey(field.fieldInstance())))){
        return (soot.SootMethod)InitialResolver.v().getPrivateFieldSetAccessMap().get(new polyglot.util.IdentityKey(field.fieldInstance()));
    }
    // globally unique name via the resolver's access counter
    String name = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
    ArrayList paramTypes = new ArrayList();
    if (!field.flags().isStatic()){
        // add this param type
        paramTypes.add(conClass.getType());
    }
    paramTypes.add(param.getType());
    soot.SootMethod meth = new soot.SootMethod(name, paramTypes, param.getType(), soot.Modifier.STATIC);
    PrivateFieldSetMethodSource pfsms = new PrivateFieldSetMethodSource(
        Util.getSootType(field.type()),
        field.name(),
        field.flags().isStatic()
    );
    conClass.addMethod(meth);
    meth.setActiveBody(pfsms.getBody(meth, null));
    InitialResolver.v().addToPrivateFieldSetAccessMap(field, meth);
    meth.addTag(new soot.tagkit.SyntheticTag());
    return meth;
}
/**
 * Returns (creating, registering and caching if necessary) the synthetic
 * static "access$NNN00" method on conClass that reads the given field.
 * Non-static fields take the receiver as a parameter; the accessor's
 * return type is the field's type.
 *
 * @param conClass the class that owns the field (accessor is added there)
 * @param field the field to generate a getter accessor for
 * @return the (possibly pre-existing) accessor method
 */
private soot.SootMethod addGetFieldAccessMeth(soot.SootClass conClass, polyglot.ast.Field field){
    // reuse a previously generated accessor for this exact field instance
    if ((InitialResolver.v().getPrivateFieldGetAccessMap() != null) && (InitialResolver.v().getPrivateFieldGetAccessMap().containsKey(new polyglot.util.IdentityKey(field.fieldInstance())))){
        return (soot.SootMethod)InitialResolver.v().getPrivateFieldGetAccessMap().get(new polyglot.util.IdentityKey(field.fieldInstance()));
    }
    // globally unique name via the resolver's access counter
    String name = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
    ArrayList paramTypes = new ArrayList();
    if (!field.flags().isStatic()){
        // add this param type
        paramTypes.add(conClass.getType());
    }
    soot.SootMethod meth = new soot.SootMethod(name, paramTypes, Util.getSootType(field.type()), soot.Modifier.STATIC);
    PrivateFieldAccMethodSource pfams = new PrivateFieldAccMethodSource(
        Util.getSootType(field.type()),
        field.name(),
        field.flags().isStatic(),
        conClass
    );
    conClass.addMethod(meth);
    meth.setActiveBody(pfams.getBody(meth, null));
    InitialResolver.v().addToPrivateFieldGetAccessMap(field, meth);
    meth.addTag(new soot.tagkit.SyntheticTag());
    return meth;
}
/**
 * Returns (creating, registering and caching if necessary) the synthetic
 * static "access$NNN00" wrapper on conClass that forwards to the private
 * method targeted by the given call. Non-static targets take the
 * receiver as an extra leading parameter; the call's own parameter types
 * and return type are preserved.
 *
 * @param conClass the class owning the target method (wrapper added there)
 * @param call the call whose method instance needs a wrapper
 * @return the (possibly pre-existing) wrapper method
 */
private soot.SootMethod addGetMethodAccessMeth(soot.SootClass conClass, polyglot.ast.Call call){
    // reuse a previously generated wrapper for this exact method instance
    if ((InitialResolver.v().getPrivateMethodGetAccessMap() != null) && (InitialResolver.v().getPrivateMethodGetAccessMap().containsKey(new polyglot.util.IdentityKey(call.methodInstance())))){
        return (soot.SootMethod)InitialResolver.v().getPrivateMethodGetAccessMap().get(new polyglot.util.IdentityKey(call.methodInstance()));
    }
    // globally unique name via the resolver's access counter
    String name = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
    ArrayList paramTypes = new ArrayList();
    if (!call.methodInstance().flags().isStatic()){
        // add this param type
        paramTypes.add(conClass.getType());
    }
    ArrayList sootParamsTypes = getSootParamsTypes(call);
    paramTypes.addAll(sootParamsTypes);
    soot.SootMethod meth = new soot.SootMethod(name, paramTypes, Util.getSootType(call.methodInstance().returnType()), soot.Modifier.STATIC);
    PrivateMethodAccMethodSource pmams = new PrivateMethodAccMethodSource(
        call.methodInstance()
    );
    conClass.addMethod(meth);
    meth.setActiveBody(pmams.getBody(meth, null));
    InitialResolver.v().addToPrivateMethodGetAccessMap(call, meth);
    meth.addTag(new soot.tagkit.SyntheticTag());
    return meth;
}
/**
 * Computes the rhs value for an assignment: plain assignment evaluates
 * the rhs directly, String += builds a concatenation, and all other
 * compound operators combine leftLocal with the rhs.
 */
protected soot.Value getAssignRightLocal(polyglot.ast.Assign assign, soot.Local leftLocal){
    if (assign.operator() == polyglot.ast.Assign.ASSIGN){
        return base().getSimpleAssignRightLocal(assign);
    }
    if ((assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) && assign.type().toString().equals("java.lang.String")){
        return getStringConcatAssignRightLocal(assign);
    }
    return getComplexAssignRightLocal(assign, leftLocal);
}
/**
 * Evaluates the rhs of a plain assignment; condition expressions are
 * materialized into a 0/1 local since they cannot be used as values.
 */
protected soot.Value getSimpleAssignRightLocal(polyglot.ast.Assign assign){
    soot.Value rhs = base().createExpr(assign.right());
    if (rhs instanceof soot.jimple.ConditionExpr) {
        return handleCondBinExpr((soot.jimple.ConditionExpr)rhs);
    }
    return rhs;
}
/**
 * Builds the rhs for a String += assignment: appends lhs then rhs to a
 * StringBuffer and returns the local holding its toString() result.
 */
private soot.Local getStringConcatAssignRightLocal(polyglot.ast.Assign assign){
    soot.Local buffer = (soot.Local)createStringBuffer(assign);
    generateAppends(assign.left(), buffer);
    generateAppends(assign.right(), buffer);
    return createToString(buffer, assign);
}
/**
 * Computes the combined value for a compound assignment (+=, -=, ...):
 * evaluates the rhs, applies the operator as (leftLocal op rhs), emits
 * the assignment into a fresh local and returns that local.
 *
 * @param assign the compound assignment node (operator is never ASSIGN)
 * @param leftLocal local currently holding the lhs value
 * @return a fresh local holding the combined result
 * @throws RuntimeException if the operator has no jimple mapping
 */
private soot.Local getComplexAssignRightLocal(polyglot.ast.Assign assign, soot.Local leftLocal){
    soot.Value right = base().createExpr(assign.right());
    if (right instanceof soot.jimple.ConditionExpr) {
        // conditions must be materialized as 0/1 before use as an operand
        right = handleCondBinExpr((soot.jimple.ConditionExpr)right);
    }
    soot.jimple.BinopExpr binop = null;
    if (assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) {
        binop = soot.jimple.Jimple.v().newAddExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.SUB_ASSIGN){
        binop = soot.jimple.Jimple.v().newSubExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.MUL_ASSIGN) {
        binop = soot.jimple.Jimple.v().newMulExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.DIV_ASSIGN) {
        binop = soot.jimple.Jimple.v().newDivExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.MOD_ASSIGN) {
        binop = soot.jimple.Jimple.v().newRemExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.SHL_ASSIGN) {
        binop = soot.jimple.Jimple.v().newShlExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.SHR_ASSIGN) {
        binop = soot.jimple.Jimple.v().newShrExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.USHR_ASSIGN) {
        binop = soot.jimple.Jimple.v().newUshrExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.BIT_AND_ASSIGN) {
        binop = soot.jimple.Jimple.v().newAndExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.BIT_OR_ASSIGN) {
        binop = soot.jimple.Jimple.v().newOrExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.BIT_XOR_ASSIGN) {
        binop = soot.jimple.Jimple.v().newXorExpr(leftLocal, right);
    }
    else {
        // previously fell through with binop == null and crashed with an
        // uninformative NPE inside newAssignStmt; fail with context instead
        throw new RuntimeException("Unhandled assign operator: "+assign.operator());
    }
    soot.Local retLocal = lg.generateLocal(leftLocal.getType());
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, binop);
    body.getUnits().add(assignStmt);
    Util.addLnPosTags(binop.getOp1Box(), assign.left().position());
    Util.addLnPosTags(binop.getOp2Box(), assign.right().position());
    return retLocal;
}
/**
 * Emits a plain assignment (lhs = rhs) and tags it with positions.
 * The expression's value is the lhs when it is a local, otherwise
 * the rhs local that was stored into it.
 */
private soot.Value getSimpleAssignLocal(polyglot.ast.Assign assign){
    soot.Value lhs = base().createLHS(assign.left());
    soot.Value rhs = base().getSimpleAssignRightLocal(assign);
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
    body.getUnits().add(assignStmt);
    Util.addLnPosTags(assignStmt, assign.position());
    Util.addLnPosTags(assignStmt.getRightOpBox(), assign.right().position());
    Util.addLnPosTags(assignStmt.getLeftOpBox(), assign.left().position());
    return (lhs instanceof soot.Local) ? lhs : rhs;
}
/**
 * Emits a String += assignment: the rhs is the concatenation result.
 * The expression's value is the lhs when it is a local, otherwise
 * the rhs local that was stored into it.
 */
private soot.Value getStrConAssignLocal(polyglot.ast.Assign assign){
    soot.Value lhs = base().createLHS(assign.left());
    soot.Value rhs = getStringConcatAssignRightLocal(assign);
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
    body.getUnits().add(assignStmt);
    Util.addLnPosTags(assignStmt, assign.position());
    Util.addLnPosTags(assignStmt.getRightOpBox(), assign.right().position());
    Util.addLnPosTags(assignStmt.getLeftOpBox(), assign.left().position());
    return (lhs instanceof soot.Local) ? lhs : rhs;
}
/**
 * Assign Expression Creation
 *
 * Top-level dispatcher for assignment expressions. Routes to the
 * accessor-based path for private/protected fields of other classes,
 * to the simple and String-concat paths, and otherwise handles compound
 * assignment by reading the lhs into a temp local, combining, and
 * writing back to non-local lhs targets. Returns the local holding the
 * assignment's value.
 */
protected soot.Value getAssignLocal(polyglot.ast.Assign assign) {
    // if assigning to a field and the field is private and its not in
    // this class (then it had better be in some outer class and will
    // be handled as such)
    if (base().needsAccessor(assign.left())){
        return base().handlePrivateFieldAssignSet(assign);
    }
    if (assign.operator() == polyglot.ast.Assign.ASSIGN){
        return getSimpleAssignLocal(assign);
    }
    if ((assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) && assign.type().toString().equals("java.lang.String")){
        return getStrConAssignLocal(assign);
    }
    soot.Value left = base().createLHS(assign.left());
    // clone the lhs ref: the original is read into a temp local, the
    // clone is reused for the write-back (a Value box can't be shared)
    soot.Value left2 = (soot.Value)left.clone();
    soot.Local leftLocal;
    if (left instanceof soot.Local){
        leftLocal = (soot.Local)left;
    }
    else {
        // materialize the current value of a field/array lhs into a local
        leftLocal = lg.generateLocal(left.getType());
        soot.jimple.AssignStmt stmt1 = soot.jimple.Jimple.v().newAssignStmt(leftLocal, left);
        body.getUnits().add(stmt1);
        Util.addLnPosTags(stmt1, assign.position());
    }
    soot.Value right = base().getAssignRightLocal(assign, leftLocal);
    soot.jimple.AssignStmt stmt2 = soot.jimple.Jimple.v().newAssignStmt(leftLocal, right);
    body.getUnits().add(stmt2);
    Util.addLnPosTags(stmt2, assign.position());
    Util.addLnPosTags(stmt2.getRightOpBox(), assign.right().position());
    Util.addLnPosTags(stmt2.getLeftOpBox(), assign.left().position());
    if (!(left instanceof soot.Local)) {
        // write the combined value back to the original lhs target
        soot.jimple.AssignStmt stmt3 = soot.jimple.Jimple.v().newAssignStmt(left2, leftLocal);
        body.getUnits().add(stmt3);
        Util.addLnPosTags(stmt3, assign.position());
        Util.addLnPosTags(stmt3.getRightOpBox(), assign.right().position());
        Util.addLnPosTags(stmt3.getLeftOpBox(), assign.left().position());
    }
    return leftLocal;
}
/**
 * Field Expression Creation - LHS
 *
 * array.length is special-cased (it is not a real field); everything
 * else yields a plain field ref suitable as an assignment target.
 */
private soot.Value getFieldLocalLeft(polyglot.ast.Field field){
    boolean isArrayLength = field.name().equals("length")
            && (field.target().type() instanceof polyglot.types.ArrayType);
    return isArrayLength ? getSpecialArrayLengthLocal(field) : getFieldRef(field);
}
/**
 * Field Expression Creation
 *
 * Translates a field read into a soot value: array.length is special,
 * .class must be a ClassLit, accessor-requiring fields go through the
 * synthetic get-accessor, qualified super accesses are special-cased,
 * constant fields are inlined as constants, and everything else becomes
 * a field ref read into a fresh local.
 *
 * @throws RuntimeException if a .class field reaches here
 */
private soot.Value getFieldLocal(polyglot.ast.Field field){
    polyglot.ast.Receiver receiver = field.target();
    // note: the unused fetch of the PolyglotMethodSource that used to
    // live here has been removed
    if ((field.name().equals("length")) && (receiver.type() instanceof polyglot.types.ArrayType)){
        return getSpecialArrayLengthLocal(field);
    }
    else if (field.name().equals("class")){
        throw new RuntimeException("Should go through ClassLit");
    }
    else if (base().needsAccessor(field)){
        soot.Value base = base().getBaseLocal(field.target());
        return getPrivateAccessFieldLocal(field, base);
    }
    if ((field.target() instanceof polyglot.ast.Special) && (((polyglot.ast.Special)field.target()).kind() == polyglot.ast.Special.SUPER) && (((polyglot.ast.Special)field.target()).qualifier() != null)){
        return getSpecialSuperQualifierLocal(field);
    }
    else if (shouldReturnConstant(field)){
        // constant fields are inlined rather than read through a field ref
        return getReturnConstant(field);
    }
    else {
        soot.jimple.FieldRef fieldRef = getFieldRef(field);
        soot.Local baseLocal = generateLocal(field.type());
        soot.jimple.AssignStmt fieldAssignStmt = soot.jimple.Jimple.v().newAssignStmt(baseLocal, fieldRef);
        body.getUnits().add(fieldAssignStmt);
        Util.addLnPosTags(fieldAssignStmt, field.position());
        Util.addLnPosTags(fieldAssignStmt.getRightOpBox(), field.position());
        return baseLocal;
    }
}
/**
 * Whether evaluating this expression requires a synthetic accessor.
 * Only field reads/writes and method calls can; other expression kinds
 * never do.
 */
protected boolean needsAccessor(polyglot.ast.Expr expr){
    if (expr instanceof polyglot.ast.Field){
        return needsAccessor(((polyglot.ast.Field)expr).fieldInstance());
    }
    if (expr instanceof polyglot.ast.Call){
        return needsAccessor(((polyglot.ast.Call)expr).methodInstance());
    }
    return false;
}
/**
 * needs accessors:
 * when field or meth is private and in some other class
 * when field or meth is protected and inherited through an enclosing
 * class's superclass (direct access from self, own superclass, or an
 * enclosing class itself is fine)
 */
protected boolean needsAccessor(polyglot.types.MemberInstance inst){
    if (inst.flags().isPrivate()){
        // private: accessor needed iff the member lives in another class
        soot.Type container = Util.getSootType(inst.container());
        return !container.equals(body.getMethod().getDeclaringClass().getType());
    }
    if (inst.flags().isProtected()){
        soot.Type container = Util.getSootType(inst.container());
        soot.SootClass currentClass = body.getMethod().getDeclaringClass();
        // declared in this very class: directly accessible
        if (container.equals(currentClass.getType())){
            return false;
        }
        // inherited from our own direct superclass: directly accessible
        if (currentClass.getSuperclass().getType().equals(container)){
            return false;
        }
        // walk the enclosing-class chain: access through an outer class
        // itself is fine, but reaching the member only via an outer
        // class's superclass requires an accessor
        while (currentClass.hasOuterClass()){
            currentClass = currentClass.getOuterClass();
            if (container.equals(currentClass.getType())){
                return false;
            }
            if (container.equals(currentClass.getSuperclass().getType())){
                return true;
            }
        }
        return false;
    }
    // package/public members never need an accessor
    return false;
}
/**
* needs a private access method if field is private and in
* some other class
*/
/*protected boolean needsPrivateAccessor(polyglot.ast.Field field){
if (field.fieldInstance().flags().isPrivate()){
if (!Util.getSootType(field.fieldInstance().container()).equals(body.getMethod().getDeclaringClass().getType())){
return true;
}
}
return false;
}*/
/**
* needs a protected access method if field is protected and in
* a super class of the outer class of the innerclass trying to access
* the field (ie not in self or in outer of self)
*/
/*
if (field.fieldInstance().flags().isProtected()){
if (!Util.getSootType(field.fieldInstance().container()).equals(body.getMethod().getDeclaringClass().getType())){
soot.SootClass checkClass = body.getMethod().getDeclaringClass();
while (checkClass.hasOuterClass()){
checkClass = checkClass.getOuterClass();
if (Util.getSootType(field.fieldInstance().container()).equals(checkClass.getType())){
return false;
}
}
return true;
}
}
return false;*/
/**
 * Wraps a constant field's compile-time value as a jimple constant.
 */
private soot.jimple.Constant getReturnConstant(polyglot.ast.Field field){
    return getConstant(field.constantValue(), field.type());
}
/**
 * Converts a compile-time constant value into the matching jimple
 * constant. Booleans become 0/1 ints; char constants (which may arrive
 * boxed as either Character or Integer) become ints; numeric constants
 * are first widened to the declared type via createConstantCast.
 *
 * @param constVal the boxed constant value
 * @param type the declared polyglot type of the constant
 * @return the jimple constant
 */
private soot.jimple.Constant getConstant(Object constVal, polyglot.types.Type type){
    if (constVal instanceof String){
        return soot.jimple.StringConstant.v((String)constVal);
    }
    if (constVal instanceof Boolean){
        return soot.jimple.IntConstant.v(((Boolean)constVal).booleanValue() ? 1 : 0);
    }
    if (type.isChar()){
        // char constants may be boxed as Integer or Character
        char charVal;
        if (constVal instanceof Integer){
            charVal = (char)((Integer)constVal).intValue();
        }
        else {
            charVal = ((Character)constVal).charValue();
        }
        return soot.jimple.IntConstant.v(charVal);
    }
    // numeric constant: widen to the declared type, then wrap by kind
    Number num = createConstantCast(type, (Number)constVal);
    if (num instanceof Long) {
        return soot.jimple.LongConstant.v(num.longValue());
    }
    if (num instanceof Double) {
        return soot.jimple.DoubleConstant.v(num.doubleValue());
    }
    if (num instanceof Float) {
        return soot.jimple.FloatConstant.v(num.floatValue());
    }
    if (num instanceof Byte) {
        return soot.jimple.IntConstant.v(num.byteValue());
    }
    if (num instanceof Short) {
        return soot.jimple.IntConstant.v(num.shortValue());
    }
    return soot.jimple.IntConstant.v(((Integer)num).intValue());
}
/**
 * Widens an Integer constant to the declared numeric type (double,
 * float or long) so the emitted jimple constant matches. All other
 * combinations are returned unchanged.
 *
 * @param fieldType declared type of the constant's field/expression
 * @param constant the boxed compile-time constant
 * @return the (possibly widened) constant
 */
private Number createConstantCast(polyglot.types.Type fieldType, Number constant) {
    if (constant instanceof Integer){
        int intVal = ((Integer)constant).intValue();
        // use valueOf instead of the deprecated boxed-type constructors
        if (fieldType.isDouble()){
            return Double.valueOf(intVal);
        }
        else if (fieldType.isFloat()){
            return Float.valueOf(intVal);
        }
        else if (fieldType.isLong()){
            return Long.valueOf(intVal);
        }
    }
    return constant;
}
/**
 * True when the field can be inlined: polyglot computed a non-null
 * compile-time constant value for it.
 */
private boolean shouldReturnConstant(polyglot.ast.Field field){
    return field.isConstant() && field.constantValue() != null;
}
/**
* creates a field ref
*/
protected soot.jimple.FieldRef getFieldRef(polyglot.ast.Field field) {
    // resolve the receiver's class and build a SootFieldRef for the field
    soot.SootClass declClass = ((soot.RefType)Util.getSootType(field.target().type())).getSootClass();
    soot.SootFieldRef sootFieldRef = soot.Scene.v().makeFieldRef(declClass, field.name(), Util.getSootType(field.type()), field.flags().isStatic());
    soot.jimple.FieldRef ref;
    if (field.fieldInstance().flags().isStatic()) {
        ref = soot.jimple.Jimple.v().newStaticFieldRef(sootFieldRef);
    }
    else {
        soot.Local baseLocal = (soot.Local)base().getBaseLocal(field.target());
        ref = soot.jimple.Jimple.v().newInstanceFieldRef(baseLocal, sootFieldRef);
    }
    // tag the base box with the receiver's source position when available
    if (field.target() instanceof polyglot.ast.Local && ref instanceof soot.jimple.InstanceFieldRef){
        Util.addLnPosTags(((soot.jimple.InstanceFieldRef)ref).getBaseBox(), field.target().position());
    }
    return ref;
}
/**
* For Inner Classes - to access private fields of their outer class
*/
private soot.Local getPrivateAccessFieldLocal(polyglot.ast.Field field, soot.Value base) {
    // Need to add an accessor method:
    //  - private field: accessor goes on the field's declaring class
    //  - protected field: accessor must go on an outer class of the current
    //    class (the declaring class is a superclass in that case), so walk
    //    outwards until one can hold the field's type
    soot.SootMethod toInvoke;
    soot.SootClass invokeClass;
    if (field.fieldInstance().flags().isPrivate()){
        invokeClass = ((soot.RefType)Util.getSootType(field.fieldInstance().container())).getSootClass();
        toInvoke = addGetFieldAccessMeth(invokeClass, field);
    }
    else {
        if (InitialResolver.v().hierarchy() == null){
            InitialResolver.v().hierarchy(new soot.FastHierarchy());
        }
        soot.SootClass containingClass = ((soot.RefType)Util.getSootType(field.fieldInstance().container())).getSootClass();
        soot.SootClass addToClass;
        if (body.getMethod().getDeclaringClass().hasOuterClass()){
            addToClass = body.getMethod().getDeclaringClass().getOuterClass();
            while (!InitialResolver.v().hierarchy().canStoreType(containingClass.getType(), addToClass.getType())){
                if (addToClass.hasOuterClass()){
                    addToClass = addToClass.getOuterClass();
                }
                else {
                    break;
                }
            }
        }
        else{
            addToClass = containingClass;
        }
        invokeClass = addToClass;
        toInvoke = addGetFieldAccessMeth(addToClass, field);
    }
    ArrayList params = new ArrayList();
    if (!field.fieldInstance().flags().isStatic()) {
        // instance accessor takes the receiver as its only argument
        params.add(base);
    }
    return Util.getPrivateAccessFieldInvoke(toInvoke.makeRef(), params, body, lg);
}
/**
* To get the local for the special .class literal
*/
private soot.Local getSpecialClassLitLocal(polyglot.ast.ClassLit lit) {
    if (lit.typeNode().type().isPrimitive()){
        // primitive class literals (int.class, void.class, ...) are read
        // from the static TYPE field of the matching wrapper class
        polyglot.types.PrimitiveType primType = (polyglot.types.PrimitiveType)lit.typeNode().type();
        soot.Local retLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
        soot.SootFieldRef primField = null;
        if (primType.isBoolean()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Boolean"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        else if (primType.isByte()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Byte"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        else if (primType.isChar()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Character"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        else if (primType.isDouble()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Double"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        else if (primType.isFloat()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Float"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        else if (primType.isInt()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Integer"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        else if (primType.isLong()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Long"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        else if (primType.isShort()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Short"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        else if (primType.isVoid()){
            primField = soot.Scene.v().makeFieldRef(soot.Scene.v().getSootClass("java.lang.Void"), "TYPE", soot.RefType.v("java.lang.Class"), true);
        }
        // NOTE(review): primField stays null if no branch matched;
        // newStaticFieldRef would then NPE — presumably unreachable for a
        // well-typed primitive, but worth confirming.
        soot.jimple.StaticFieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(primField);
        soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, fieldRef);
        body.getUnits().add(assignStmt);
        return retLocal;
    }
    else {
        // Reference-type literal: use the synthetic cached "class$..." field
        // plus the synthetic class$(String) helper, emitting the classic
        // pre-1.5 pattern:
        //   if (class$X != null) goto load;
        //   class$X = class$("X");
        //   load: local = class$X;
        soot.SootClass thisClass = body.getMethod().getDeclaringClass();
        String fieldName = Util.getFieldNameForClassLit(lit.typeNode().type());
        soot.Type fieldType = soot.RefType.v("java.lang.Class");
        soot.Local fieldLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
        soot.SootFieldRef sootField = null;
        if (thisClass.isInterface()){
            // interfaces cannot hold the cache field; it lives on the
            // special anonymous class generated for this interface
            HashMap specialAnonMap = InitialResolver.v().specialAnonMap();
            if ((specialAnonMap != null) && (specialAnonMap.containsKey(thisClass))){
                soot.SootClass specialClass = (soot.SootClass)specialAnonMap.get(thisClass);
                sootField = soot.Scene.v().makeFieldRef(specialClass, fieldName, fieldType, true);
            }
            else {
                throw new RuntimeException("Class is interface so it must have an anon class to handle class lits but its anon class cannot be found.");
            }
        }
        else {
            sootField = soot.Scene.v().makeFieldRef(thisClass, fieldName, fieldType, true);
        }
        soot.jimple.StaticFieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(sootField);
        soot.jimple.Stmt fieldAssign = soot.jimple.Jimple.v().newAssignStmt(fieldLocal, fieldRef);
        body.getUnits().add(fieldAssign);
        // if (cache != null) skip the class$ call
        soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
        soot.jimple.Expr neExpr = soot.jimple.Jimple.v().newNeExpr(fieldLocal, soot.jimple.NullConstant.v());
        soot.jimple.Stmt ifStmt = soot.jimple.Jimple.v().newIfStmt(neExpr, noop1);
        body.getUnits().add(ifStmt);
        ArrayList paramTypes = new ArrayList();
        paramTypes.add(soot.RefType.v("java.lang.String"));
        soot.SootMethodRef invokeMeth = null;
        if (thisClass.isInterface()){
            // same anon-class indirection for the class$ helper method
            HashMap specialAnonMap = InitialResolver.v().specialAnonMap();
            if ((specialAnonMap != null) && (specialAnonMap.containsKey(thisClass))){
                soot.SootClass specialClass = (soot.SootClass)specialAnonMap.get(thisClass);
                invokeMeth = soot.Scene.v().makeMethodRef(specialClass, "class$", paramTypes, soot.RefType.v("java.lang.Class"), true);
            }
            else {
                throw new RuntimeException("Class is interface so it must have an anon class to handle class lits but its anon class cannot be found.");
            }
        }
        else {
            invokeMeth = soot.Scene.v().makeMethodRef(thisClass, "class$", paramTypes, soot.RefType.v("java.lang.Class"), true);
        }
        // slow path: methLocal = class$("name"); cache = methLocal
        ArrayList params = new ArrayList();
        params.add(soot.jimple.StringConstant.v(Util.getParamNameForClassLit(lit.typeNode().type())));
        soot.jimple.Expr classInvoke = soot.jimple.Jimple.v().newStaticInvokeExpr(invokeMeth, params);
        soot.Local methLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
        soot.jimple.Stmt invokeAssign = soot.jimple.Jimple.v().newAssignStmt(methLocal, classInvoke);
        body.getUnits().add(invokeAssign);
        soot.jimple.Stmt assignField = soot.jimple.Jimple.v().newAssignStmt(fieldRef, methLocal);
        body.getUnits().add(assignField);
        soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
        soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
        body.getUnits().add(goto1);
        // fast path: methLocal = cache
        body.getUnits().add(noop1);
        fieldAssign = soot.jimple.Jimple.v().newAssignStmt(methLocal, fieldRef);
        body.getUnits().add(fieldAssign);
        body.getUnits().add(noop2);
        return methLocal;
    }
}
/**
* Array Length local for example a.length w/o brackets gets length
* of array
*/
private soot.Local getSpecialArrayLengthLocal(polyglot.ast.Field field) {
    polyglot.ast.Receiver target = field.target();
    soot.Local arrayLocal;
    // get a local holding the array whose length is being read
    if (target instanceof polyglot.ast.Local) {
        arrayLocal = getLocal((polyglot.ast.Local)target);
    }
    else if (target instanceof polyglot.ast.Expr){
        arrayLocal = (soot.Local)base().createExpr((polyglot.ast.Expr)target);
    }
    else {
        arrayLocal = generateLocal(target.type());
    }
    // result = lengthof array
    soot.jimple.LengthExpr lengthExpr = soot.jimple.Jimple.v().newLengthExpr(arrayLocal);
    soot.Local result = lg.generateLocal(soot.IntType.v());
    soot.jimple.Stmt lenAssign = soot.jimple.Jimple.v().newAssignStmt(result, lengthExpr);
    body.getUnits().add(lenAssign);
    Util.addLnPosTags(lenAssign, field.position());
    Util.addLnPosTags(lengthExpr.getOpBox(), target.position());
    return result;
}
/**
* Binary Expression Creation
*/
private soot.Value getBinaryLocal(polyglot.ast.Binary binary) {
    soot.Value rhs;
    // && and || need short-circuit control flow, not a single binop
    if (binary.operator() == polyglot.ast.Binary.COND_AND) {
        return createCondAnd(binary);
    }
    if (binary.operator() == polyglot.ast.Binary.COND_OR) {
        return createCondOr(binary);
    }
    if (binary.type().toString().equals("java.lang.String")){
        if (areAllStringLits(binary)){
            // every operand is a compile-time constant: fold the whole
            // concatenation into a single string constant
            String result = createStringConstant(binary);
            return soot.jimple.StringConstant.v(result);
        }
        else {
            // lower to: new StringBuffer().append(...)...toString()
            soot.Local sb = (soot.Local)createStringBuffer(binary);
            generateAppends(binary.left(), sb);
            generateAppends(binary.right(), sb);
            return createToString(sb, binary);
        }
    }
    soot.Value lVal = base().createExpr(binary.left());
    soot.Value rVal = base().createExpr(binary.right());
    if (isComparisonBinary(binary.operator())) {
        rhs = getBinaryComparisonExpr(lVal, rVal, binary.operator());
    }
    else {
        rhs = getBinaryExpr(lVal, rVal, binary.operator());
    }
    if (rhs instanceof soot.jimple.BinopExpr) {
        Util.addLnPosTags(((soot.jimple.BinopExpr)rhs).getOp1Box(), binary.left().position());
        Util.addLnPosTags(((soot.jimple.BinopExpr)rhs).getOp2Box(), binary.right().position());
    }
    if (rhs instanceof soot.jimple.ConditionExpr) {
        // condition expressions are returned raw so callers can branch on them
        return rhs;
    }
    // materialize the result into a fresh local
    soot.Local lhs = generateLocal(binary.type());
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
    body.getUnits().add(assignStmt);
    Util.addLnPosTags(assignStmt.getRightOpBox(), binary.position());
    Util.addLnPosTags(assignStmt, binary.position());
    return lhs;
}
/**
 * True when every leaf of the (possibly nested) expression is a
 * compile-time constant, so a string concatenation over it can be folded
 * into a single constant instead of StringBuffer appends.
 */
private boolean areAllStringLits(polyglot.ast.Node node){
    if (node instanceof polyglot.ast.StringLit) {
        return true;
    }
    // fields must be checked before the generic Lit case
    if (node instanceof polyglot.ast.Field) {
        return shouldReturnConstant((polyglot.ast.Field)node);
    }
    if (node instanceof polyglot.ast.Binary){
        return areAllStringLitsBinary((polyglot.ast.Binary)node);
    }
    if (node instanceof polyglot.ast.Cast){
        return ((polyglot.ast.Cast)node).isConstant();
    }
    if (node instanceof polyglot.ast.Lit){
        return ((polyglot.ast.Lit)node).isConstant();
    }
    return false;
}
// Both sides of the binary must themselves be all-constant.
private boolean areAllStringLitsBinary(polyglot.ast.Binary binary){
    return areAllStringLits(binary.left()) && areAllStringLits(binary.right());
}
/**
 * Folds a constant expression into its string form, for building a single
 * string constant out of an all-constant concatenation.
 */
private String createStringConstant(polyglot.ast.Node node){
    String s = null;
    if (node instanceof polyglot.ast.StringLit){
        s = ((polyglot.ast.StringLit)node).value();
    }
    else if (node instanceof polyglot.ast.Cast){
        polyglot.ast.Cast cast = (polyglot.ast.Cast)node;
        if (cast.type().isChar()){
            // char casts: render the char itself, not its numeric value
            s = "" + (char)((Character)cast.constantValue()).charValue();
        }
        else {
            s = "" + cast.constantValue();
        }
    }
    else if (node instanceof polyglot.ast.CharLit){
        s = "" + ((polyglot.ast.CharLit)node).value();
    }
    else if (node instanceof polyglot.ast.BooleanLit){
        s = "" + ((polyglot.ast.BooleanLit)node).value();
    }
    else if (node instanceof polyglot.ast.IntLit){
        s = "" + ((polyglot.ast.IntLit)node).value();
    }
    else if (node instanceof polyglot.ast.FloatLit){
        s = "" + ((polyglot.ast.FloatLit)node).value();
    }
    else if (node instanceof polyglot.ast.Field){
        // mirrors the value handling in getConstant, but producing strings
        polyglot.ast.Field field = (polyglot.ast.Field)node;
        if (field.fieldInstance().constantValue() instanceof String){
            s = (String)field.constantValue();
        }
        else if (field.fieldInstance().constantValue() instanceof Boolean){
            // booleans render as 1/0, matching their Jimple int encoding
            boolean val = ((Boolean)field.constantValue()).booleanValue();
            int temp = val ? 1 : 0;
            s = "" + temp;
        }
        else if (field.type().isChar()){
            // NOTE(review): unlike getConstant, this assumes a char field's
            // constant is boxed as Integer (never Character) — confirm.
            char val = (char)((Integer)field.constantValue()).intValue();
            s = "" + val;
        }
        else {
            Number num = (Number)field.fieldInstance().constantValue();
            num = createConstantCast(field.type(), num);
            if (num instanceof Long) {
                s = "" + ((Long)num).longValue();
            }
            else if (num instanceof Double) {
                s = "" + ((Double)num).doubleValue();
            }
            else if (num instanceof Float) {
                s = "" + ((Float)num).floatValue();
            }
            else if (num instanceof Byte) {
                s = "" + ((Byte)num).byteValue();
            }
            else if (num instanceof Short) {
                s = "" + ((Short)num).shortValue();
            }
            else {
                s = "" + ((Integer)num).intValue();
            }
        }
    }
    else if (node instanceof polyglot.ast.Binary){
        s = createStringConstantBinary((polyglot.ast.Binary)node);
    }
    else {
        throw new RuntimeException("No other string constant folding done");
    }
    return s;
}
// Folds a constant binary expression: string-typed binaries concatenate the
// folded halves; any other type is evaluated and rendered as a string.
private String createStringConstantBinary(polyglot.ast.Binary binary){
    if (Util.getSootType(binary.type()).toString().equals("java.lang.String")){
        return createStringConstant(binary.left()) + createStringConstant(binary.right());
    }
    return binary.constantValue().toString();
}
// True for the six relational/equality operators, which become Jimple
// condition expressions rather than ordinary binops.
private boolean isComparisonBinary(polyglot.ast.Binary.Operator op) {
    return (op == polyglot.ast.Binary.EQ) || (op == polyglot.ast.Binary.NE)
        || (op == polyglot.ast.Binary.GE) || (op == polyglot.ast.Binary.GT)
        || (op == polyglot.ast.Binary.LE) || (op == polyglot.ast.Binary.LT);
}
/**
* Creates a binary expression that is not a comparison
*/
private soot.Value getBinaryExpr(soot.Value lVal, soot.Value rVal, polyglot.ast.Binary.Operator operator){
    soot.Value rValue = null;
    // condition expressions cannot appear as binop operands in Jimple;
    // materialize them into 0/1 boolean locals first
    if (lVal instanceof soot.jimple.ConditionExpr) {
        lVal = handleCondBinExpr((soot.jimple.ConditionExpr)lVal);
    }
    if (rVal instanceof soot.jimple.ConditionExpr) {
        rVal = handleCondBinExpr((soot.jimple.ConditionExpr)rVal);
    }
    if (operator == polyglot.ast.Binary.ADD){
        rValue = soot.jimple.Jimple.v().newAddExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.SUB){
        rValue = soot.jimple.Jimple.v().newSubExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.MUL){
        rValue = soot.jimple.Jimple.v().newMulExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.DIV){
        rValue = soot.jimple.Jimple.v().newDivExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.SHR){
        rValue = soot.jimple.Jimple.v().newShrExpr(lVal, normalizeShiftAmount(rVal));
    }
    else if (operator == polyglot.ast.Binary.USHR){
        rValue = soot.jimple.Jimple.v().newUshrExpr(lVal, normalizeShiftAmount(rVal));
    }
    else if (operator == polyglot.ast.Binary.SHL){
        rValue = soot.jimple.Jimple.v().newShlExpr(lVal, normalizeShiftAmount(rVal));
    }
    else if (operator == polyglot.ast.Binary.BIT_AND){
        rValue = soot.jimple.Jimple.v().newAndExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.BIT_OR){
        rValue = soot.jimple.Jimple.v().newOrExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.BIT_XOR){
        rValue = soot.jimple.Jimple.v().newXorExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.MOD){
        rValue = soot.jimple.Jimple.v().newRemExpr(lVal, rVal);
    }
    else {
        throw new RuntimeException("Binary not yet handled!");
    }
    return rValue;
}

/**
 * Shift amounts must be int-typed in Jimple; when the amount is a long,
 * emit a cast down to int and return the resulting local. (This cast was
 * previously duplicated verbatim for SHR, USHR and SHL.)
 */
private soot.Value normalizeShiftAmount(soot.Value shiftVal){
    if (!shiftVal.getType().equals(soot.LongType.v())){
        return shiftVal;
    }
    soot.Local intVal = lg.generateLocal(soot.IntType.v());
    soot.jimple.CastExpr castExpr = soot.jimple.Jimple.v().newCastExpr(shiftVal, soot.IntType.v());
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(intVal, castExpr);
    body.getUnits().add(assignStmt);
    return intVal;
}
/**
* Creates a binary expr that is a comparison
*/
private soot.Value getBinaryComparisonExpr(soot.Value lVal, soot.Value rVal, polyglot.ast.Binary.Operator operator) {
    // map each relational operator to its Jimple condition-expression factory
    if (operator == polyglot.ast.Binary.EQ){
        return soot.jimple.Jimple.v().newEqExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.GE){
        return soot.jimple.Jimple.v().newGeExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.GT){
        return soot.jimple.Jimple.v().newGtExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.LE){
        return soot.jimple.Jimple.v().newLeExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.LT){
        return soot.jimple.Jimple.v().newLtExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.NE){
        return soot.jimple.Jimple.v().newNeExpr(lVal, rVal);
    }
    throw new RuntimeException("Unknown Comparison Expr");
}
/**
* in bytecode and Jimple the conditions in conditional binary
* expressions are often reversed
*/
private soot.Value reverseCondition(soot.jimple.ConditionExpr cond) {
    soot.Value left = cond.getOp1();
    soot.Value right = cond.getOp2();
    soot.jimple.ConditionExpr flipped;
    // each comparison maps to its logical negation with the same operands
    if (cond instanceof soot.jimple.EqExpr) {
        flipped = soot.jimple.Jimple.v().newNeExpr(left, right);
    }
    else if (cond instanceof soot.jimple.NeExpr) {
        flipped = soot.jimple.Jimple.v().newEqExpr(left, right);
    }
    else if (cond instanceof soot.jimple.GtExpr) {
        flipped = soot.jimple.Jimple.v().newLeExpr(left, right);
    }
    else if (cond instanceof soot.jimple.GeExpr) {
        flipped = soot.jimple.Jimple.v().newLtExpr(left, right);
    }
    else if (cond instanceof soot.jimple.LtExpr) {
        flipped = soot.jimple.Jimple.v().newGeExpr(left, right);
    }
    else if (cond instanceof soot.jimple.LeExpr) {
        flipped = soot.jimple.Jimple.v().newGtExpr(left, right);
    }
    else {
        throw new RuntimeException("Unknown Condition Expr");
    }
    // carry over any position tags attached to the operand boxes
    flipped.getOp1Box().addAllTagsOf(cond.getOp1Box());
    flipped.getOp2Box().addAllTagsOf(cond.getOp2Box());
    return flipped;
}
/**
* Special conditions for doubles and floats and longs
*/
private soot.Value handleDFLCond(soot.jimple.ConditionExpr cond){
    soot.Local result = lg.generateLocal(soot.ByteType.v());
    soot.jimple.Expr cmExpr = null;
    if (isDouble(cond.getOp1()) || isDouble(cond.getOp2()) || isFloat(cond.getOp1()) || isFloat(cond.getOp2())) {
        // floating point: cmpg for >=/> (NaN pushes result the "greater"
        // way), cmpl otherwise — matching bytecode semantics
        if ((cond instanceof soot.jimple.GeExpr) || (cond instanceof soot.jimple.GtExpr)) {
            cmExpr = soot.jimple.Jimple.v().newCmpgExpr(cond.getOp1(), cond.getOp2());
        }
        else {
            cmExpr = soot.jimple.Jimple.v().newCmplExpr(cond.getOp1(), cond.getOp2());
        }
    }
    else if (isLong(cond.getOp1()) || isLong(cond.getOp2())) {
        // longs use the plain cmp instruction
        cmExpr = soot.jimple.Jimple.v().newCmpExpr(cond.getOp1(), cond.getOp2());
    }
    else {
        // int-like operands: the condition can be branched on directly
        return cond;
    }
    // result = cmp(op1, op2); then compare result against 0 with the same
    // relational operator as the original condition
    soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(result, cmExpr);
    body.getUnits().add(assign);
    if (cond instanceof soot.jimple.EqExpr){
        cond = soot.jimple.Jimple.v().newEqExpr(result, soot.jimple.IntConstant.v(0));
    }
    else if (cond instanceof soot.jimple.GeExpr){
        cond = soot.jimple.Jimple.v().newGeExpr(result, soot.jimple.IntConstant.v(0));
    }
    else if (cond instanceof soot.jimple.GtExpr){
        cond = soot.jimple.Jimple.v().newGtExpr(result, soot.jimple.IntConstant.v(0));
    }
    else if (cond instanceof soot.jimple.LeExpr){
        cond = soot.jimple.Jimple.v().newLeExpr(result, soot.jimple.IntConstant.v(0));
    }
    else if (cond instanceof soot.jimple.LtExpr){
        cond = soot.jimple.Jimple.v().newLtExpr(result, soot.jimple.IntConstant.v(0));
    }
    else if (cond instanceof soot.jimple.NeExpr){
        cond = soot.jimple.Jimple.v().newNeExpr(result, soot.jimple.IntConstant.v(0));
    }
    else {
        throw new RuntimeException("Unknown Comparison Expr");
    }
    return cond;
}
// True when the value's static type is double.
private boolean isDouble(soot.Value val) {
    return val.getType() instanceof soot.DoubleType;
}
// True when the value's static type is float.
private boolean isFloat(soot.Value val) {
    return val.getType() instanceof soot.FloatType;
}
// True when the value's static type is long.
private boolean isLong(soot.Value val) {
    return val.getType() instanceof soot.LongType;
}
/**
* Creates a conitional AND expr
*/
private soot.Local createCondAnd(polyglot.ast.Binary binary) {
    // emits:  if (!left) goto FALSE; if (!right) goto FALSE;
    //         ret = 1; goto END; FALSE: ret = 0; END:
    soot.Local retLocal = lg.generateLocal(soot.BooleanType.v());
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    soot.Value lVal = base().createExpr(binary.left());
    boolean leftNeedIf = needSootIf(lVal);
    if (!(lVal instanceof soot.jimple.ConditionExpr)) {
        // plain value: branch to FALSE when it equals 0
        lVal = soot.jimple.Jimple.v().newEqExpr(lVal, soot.jimple.IntConstant.v(0));
    }
    else {
        // condition: reverse it so the branch goes to FALSE when untrue,
        // then lower double/float/long comparisons
        lVal = reverseCondition((soot.jimple.ConditionExpr)lVal);
        lVal = handleDFLCond((soot.jimple.ConditionExpr)lVal);
    }
    if (leftNeedIf){
        soot.jimple.IfStmt ifLeft = soot.jimple.Jimple.v().newIfStmt(lVal, noop1);
        body.getUnits().add(ifLeft);
        Util.addLnPosTags(ifLeft.getConditionBox(), binary.left().position());
        Util.addLnPosTags(ifLeft, binary.left().position());
    }
    soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
    // right operand is only evaluated here, after the left-side branch,
    // which gives the short-circuit behavior
    soot.Value rVal = base().createExpr(binary.right());
    boolean rightNeedIf = needSootIf(rVal);
    if (!(rVal instanceof soot.jimple.ConditionExpr)) {
        rVal = soot.jimple.Jimple.v().newEqExpr(rVal, soot.jimple.IntConstant.v(0));
    }
    else {
        rVal = reverseCondition((soot.jimple.ConditionExpr)rVal);
        rVal = handleDFLCond((soot.jimple.ConditionExpr)rVal);
    }
    if (rightNeedIf){
        soot.jimple.IfStmt ifRight = soot.jimple.Jimple.v().newIfStmt(rVal, noop1);
        body.getUnits().add(ifRight);
        Util.addLnPosTags(ifRight.getConditionBox(), binary.right().position());
        Util.addLnPosTags(ifRight, binary.right().position());
    }
    // both operands true
    soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(1));
    body.getUnits().add(assign1);
    soot.jimple.Stmt gotoEnd1 = soot.jimple.Jimple.v().newGotoStmt(endNoop);
    body.getUnits().add(gotoEnd1);
    // FALSE:
    body.getUnits().add(noop1);
    soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(0));
    body.getUnits().add(assign2);
    // END:
    body.getUnits().add(endNoop);
    Util.addLnPosTags(assign1, binary.position());
    Util.addLnPosTags(assign2, binary.position());
    return retLocal;
}
/**
* Creates a conditional OR expr
*/
private soot.Local createCondOr(polyglot.ast.Binary binary) {
    // emits:  if (left) goto TRUE; if (right) goto TRUE;
    //         ret = 0; goto END; TRUE: ret = 1; END:
    soot.Local retLocal = lg.generateLocal(soot.BooleanType.v());
    soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    soot.Value lVal = base().createExpr(binary.left());
    boolean leftNeedIf = needSootIf(lVal);
    if (!(lVal instanceof soot.jimple.ConditionExpr)) {
        // plain value: branch to TRUE when it equals 1
        lVal = soot.jimple.Jimple.v().newEqExpr(lVal, soot.jimple.IntConstant.v(1));
    }
    else {
        // unlike &&, the condition is NOT reversed: branch when true
        lVal = handleDFLCond((soot.jimple.ConditionExpr)lVal);
    }
    if (leftNeedIf){
        soot.jimple.IfStmt ifLeft = soot.jimple.Jimple.v().newIfStmt(lVal, noop1);
        body.getUnits().add(ifLeft);
        Util.addLnPosTags(ifLeft, binary.left().position());
        Util.addLnPosTags(ifLeft.getConditionBox(), binary.left().position());
    }
    // right operand evaluated only after the left branch (short circuit)
    soot.Value rVal = base().createExpr(binary.right());
    boolean rightNeedIf = needSootIf(rVal);
    if (!(rVal instanceof soot.jimple.ConditionExpr)) {
        rVal = soot.jimple.Jimple.v().newEqExpr(rVal, soot.jimple.IntConstant.v(1));
    }
    else {
        rVal = handleDFLCond((soot.jimple.ConditionExpr)rVal);
    }
    if (rightNeedIf){
        soot.jimple.IfStmt ifRight = soot.jimple.Jimple.v().newIfStmt(rVal, noop1);
        body.getUnits().add(ifRight);
        Util.addLnPosTags(ifRight, binary.right().position());
        Util.addLnPosTags(ifRight.getConditionBox(), binary.right().position());
    }
    // both operands false
    soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(0));
    body.getUnits().add(assign2);
    // tag each assignment exactly once (the original tagged both twice)
    Util.addLnPosTags(assign2, binary.position());
    soot.jimple.Stmt gotoEnd2 = soot.jimple.Jimple.v().newGotoStmt(endNoop);
    body.getUnits().add(gotoEnd2);
    // TRUE:
    body.getUnits().add(noop1);
    soot.jimple.Stmt assign3 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(1));
    body.getUnits().add(assign3);
    Util.addLnPosTags(assign3, binary.position());
    // END:
    body.getUnits().add(endNoop);
    return retLocal;
}
/**
 * Materializes a condition expression into a 0/1 boolean local:
 *   if (!cond) goto ZERO; b = 1; goto END; ZERO: b = 0; END:
 */
private soot.Local handleCondBinExpr(soot.jimple.ConditionExpr condExpr) {
    soot.Local boolLocal = lg.generateLocal(soot.BooleanType.v());
    soot.jimple.Stmt zeroTarget = soot.jimple.Jimple.v().newNopStmt();
    // reverse so the branch is taken when the condition is false, then
    // lower double/float/long comparisons
    soot.Value branchCond = reverseCondition(condExpr);
    branchCond = handleDFLCond((soot.jimple.ConditionExpr)branchCond);
    body.getUnits().add(soot.jimple.Jimple.v().newIfStmt(branchCond, zeroTarget));
    body.getUnits().add(soot.jimple.Jimple.v().newAssignStmt(boolLocal, soot.jimple.IntConstant.v(1)));
    soot.jimple.Stmt endTarget = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(soot.jimple.Jimple.v().newGotoStmt(endTarget));
    body.getUnits().add(zeroTarget);
    body.getUnits().add(soot.jimple.Jimple.v().newAssignStmt(boolLocal, soot.jimple.IntConstant.v(0)));
    body.getUnits().add(endTarget);
    return boolLocal;
}
/**
 * Allocates and constructs a java.lang.StringBuffer, returning the local
 * that holds it (used to lower non-constant string concatenation).
 */
private soot.Local createStringBuffer(polyglot.ast.Expr expr){
    soot.RefType sbType = soot.RefType.v("java.lang.StringBuffer");
    // sbLocal = new StringBuffer
    soot.Local sbLocal = lg.generateLocal(sbType);
    soot.jimple.Stmt newAssign = soot.jimple.Jimple.v().newAssignStmt(
        sbLocal, soot.jimple.Jimple.v().newNewExpr(sbType));
    body.getUnits().add(newAssign);
    Util.addLnPosTags(newAssign, expr.position());
    // specialinvoke sbLocal.<init>()
    soot.SootClass sbClass = soot.Scene.v().getSootClass("java.lang.StringBuffer");
    soot.SootMethodRef ctorRef = soot.Scene.v().makeMethodRef(sbClass, "<init>", new ArrayList(), soot.VoidType.v(), false);
    soot.jimple.Stmt ctorStmt = soot.jimple.Jimple.v().newInvokeStmt(
        soot.jimple.Jimple.v().newSpecialInvokeExpr(sbLocal, ctorRef));
    body.getUnits().add(ctorStmt);
    Util.addLnPosTags(ctorStmt, expr.position());
    return sbLocal;
}
/**
 * Invokes toString() on the given StringBuffer local and returns a fresh
 * String local holding the result.
 */
private soot.Local createToString(soot.Local sb, polyglot.ast.Expr expr){
    soot.Local resultStr = lg.generateLocal(soot.RefType.v("java.lang.String"));
    soot.SootClass sbClass = soot.Scene.v().getSootClass("java.lang.StringBuffer");
    soot.SootMethodRef toStringRef = soot.Scene.v().makeMethodRef(sbClass, "toString", new ArrayList(), soot.RefType.v("java.lang.String"), false);
    soot.jimple.VirtualInvokeExpr call = soot.jimple.Jimple.v().newVirtualInvokeExpr(sb, toStringRef);
    soot.jimple.Stmt resultAssign = soot.jimple.Jimple.v().newAssignStmt(resultStr, call);
    body.getUnits().add(resultAssign);
    Util.addLnPosTags(resultAssign, expr.position());
    return resultStr;
}
/**
 * True when the expression is a String-typed '+' (or '+=' assignment),
 * i.e. a concatenation that must be lowered to StringBuffer appends.
 */
private boolean isStringConcat(polyglot.ast.Expr expr){
    if (expr instanceof polyglot.ast.Binary) {
        polyglot.ast.Binary bin = (polyglot.ast.Binary)expr;
        return bin.operator() == polyglot.ast.Binary.ADD
            && bin.type().toString().equals("java.lang.String");
    }
    if (expr instanceof polyglot.ast.Assign) {
        polyglot.ast.Assign assign = (polyglot.ast.Assign)expr;
        return assign.operator() == polyglot.ast.Assign.ADD_ASSIGN
            && assign.type().toString().equals("java.lang.String");
    }
    return false;
}
/**
* Generates one part of a concatenation String
*/
private void generateAppends(polyglot.ast.Expr expr, soot.Local sb) {
    if (isStringConcat(expr)){
        // nested concatenation: flatten into appends on the same buffer
        if (expr instanceof polyglot.ast.Binary){
            generateAppends(((polyglot.ast.Binary)expr).left(), sb);
            generateAppends(((polyglot.ast.Binary)expr).right(), sb);
        }
        else {
            generateAppends(((polyglot.ast.Assign)expr).left(), sb);
            generateAppends(((polyglot.ast.Assign)expr).right(), sb);
        }
    }
    else {
        soot.Value toApp = base().createExpr(expr);
        // choose the StringBuffer.append overload by the value's type
        soot.Type appendType = null;
        if (toApp instanceof soot.jimple.StringConstant) {
            appendType = soot.RefType.v("java.lang.String");
        }
        else if (toApp instanceof soot.jimple.NullConstant){
            appendType = soot.RefType.v("java.lang.Object");
        }
        else if (toApp instanceof soot.jimple.Constant) {
            appendType = toApp.getType();
        }
        else if (toApp instanceof soot.Local) {
            if (((soot.Local)toApp).getType() instanceof soot.PrimType) {
                appendType = ((soot.Local)toApp).getType();
            }
            else if (((soot.Local)toApp).getType() instanceof soot.RefType) {
                if (((soot.Local)toApp).getType().toString().equals("java.lang.String")){
                    appendType = soot.RefType.v("java.lang.String");
                }
                else if (((soot.Local)toApp).getType().toString().equals("java.lang.StringBuffer")){
                    appendType = soot.RefType.v("java.lang.StringBuffer");
                }
                else{
                    appendType = soot.RefType.v("java.lang.Object");
                }
            }
            else {
                // this is for arrays
                appendType = soot.RefType.v("java.lang.Object");
            }
        }
        else if (toApp instanceof soot.jimple.ConditionExpr) {
            // conditions are materialized into a boolean local first
            toApp = handleCondBinExpr((soot.jimple.ConditionExpr)toApp);
            appendType = soot.BooleanType.v();
        }
        // NOTE(review): appendType stays null if toApp is none of the cases
        // above, which would NPE below — presumably unreachable; confirm.
        // handle shorts/bytes: no short/byte append overload, widen to int
        if (appendType instanceof soot.ShortType || appendType instanceof soot.ByteType) {
            soot.Local intLocal = lg.generateLocal(soot.IntType.v());
            soot.jimple.Expr cast = soot.jimple.Jimple.v().newCastExpr(toApp, soot.IntType.v());
            soot.jimple.Stmt castAssign = soot.jimple.Jimple.v().newAssignStmt(intLocal, cast);
            body.getUnits().add(castAssign);
            toApp = intLocal;
            appendType = soot.IntType.v();
        }
        // emit: sb.append(toApp)
        ArrayList paramsTypes = new ArrayList();
        paramsTypes.add(appendType);
        ArrayList params = new ArrayList();
        params.add(toApp);
        soot.SootClass classToInvoke = soot.Scene.v().getSootClass("java.lang.StringBuffer");
        soot.SootMethodRef methodToInvoke = soot.Scene.v().makeMethodRef(classToInvoke, "append", paramsTypes, soot.RefType.v("java.lang.StringBuffer"), false);
        soot.jimple.VirtualInvokeExpr appendInvoke = soot.jimple.Jimple.v().newVirtualInvokeExpr(sb, methodToInvoke, params);
        Util.addLnPosTags(appendInvoke.getArgBox(0), expr.position());
        soot.jimple.Stmt appendStmt = soot.jimple.Jimple.v().newInvokeStmt(appendInvoke);
        body.getUnits().add(appendStmt);
        Util.addLnPosTags(appendStmt, expr.position());
    }
}
/**
 * Unary Expression Creation
 *
 * Translates a polyglot unary expression (++/-- pre/post, ~, -, +, !) into
 * Jimple units appended to the current body, returning the local holding the
 * result. (Cleanup: removed a large block of dead commented-out code and an
 * unused IntConstant local in the BIT_NOT branch; behavior is unchanged.)
 */
private soot.Local getUnaryLocal(polyglot.ast.Unary unary) {
    polyglot.ast.Expr expr = unary.expr();
    polyglot.ast.Unary.Operator op = unary.operator();
    if (op == polyglot.ast.Unary.POST_INC || op == polyglot.ast.Unary.PRE_INC || op == polyglot.ast.Unary.POST_DEC || op == polyglot.ast.Unary.PRE_DEC) {
        // targets needing synthetic accessors are handled by the base builder
        if (base().needsAccessor(unary.expr())){
            return base().handlePrivateFieldUnarySet(unary);
        }
        soot.Value left = base().createLHS(unary.expr());
        // do necessary cloning
        soot.Value leftClone = soot.jimple.Jimple.v().cloneIfNecessary(left);
        // tmp holds the original value (this is what a post-op returns)
        soot.Local tmp = lg.generateLocal(left.getType());
        soot.jimple.AssignStmt stmt1 = soot.jimple.Jimple.v().newAssignStmt(tmp, left);
        body.getUnits().add(stmt1);
        Util.addLnPosTags(stmt1, unary.position());
        soot.Value incVal = base().getConstant(left.getType(), 1);
        soot.jimple.BinopExpr binExpr;
        if (unary.operator() == polyglot.ast.Unary.PRE_INC || unary.operator() == polyglot.ast.Unary.POST_INC){
            binExpr = soot.jimple.Jimple.v().newAddExpr(tmp, incVal);
        }
        else {
            binExpr = soot.jimple.Jimple.v().newSubExpr(tmp, incVal);
        }
        // tmp2 holds the updated value (this is what a pre-op returns)
        soot.Local tmp2 = lg.generateLocal(left.getType());
        soot.jimple.AssignStmt assign = soot.jimple.Jimple.v().newAssignStmt(tmp2, binExpr);
        body.getUnits().add(assign);
        // write the updated value back into the original lvalue
        soot.jimple.AssignStmt stmt3 = soot.jimple.Jimple.v().newAssignStmt(leftClone, tmp2);
        body.getUnits().add(stmt3);
        if (unary.operator() == polyglot.ast.Unary.POST_DEC || unary.operator() == polyglot.ast.Unary.POST_INC){
            return tmp;
        }
        else {
            return tmp2;
        }
    }
    else if (op == polyglot.ast.Unary.BIT_NOT) {
        // ~x is encoded as x ^ -1 (Jimple has no bitwise-not expression)
        soot.Local retLocal = generateLocal(expr.type());
        soot.Value sootExpr = base().createExpr(expr);
        soot.jimple.XorExpr xor = soot.jimple.Jimple.v().newXorExpr(sootExpr, base().getConstant(sootExpr.getType(), -1));
        Util.addLnPosTags(xor.getOp1Box(), expr.position());
        soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, xor);
        body.getUnits().add(assign1);
        Util.addLnPosTags(assign1, unary.position());
        return retLocal;
    }
    else if (op == polyglot.ast.Unary.NEG) {
        soot.Value sootExpr;
        // negated numeric literals are folded straight into constants
        if (expr instanceof polyglot.ast.IntLit) {
            long longVal = ((polyglot.ast.IntLit)expr).value();
            if (((polyglot.ast.IntLit)expr).kind() == polyglot.ast.IntLit.LONG){
                sootExpr = soot.jimple.LongConstant.v(-longVal);
            }
            else {
                sootExpr = soot.jimple.IntConstant.v(-(int)longVal);
            }
        }
        else if (expr instanceof polyglot.ast.FloatLit){
            double doubleVal = ((polyglot.ast.FloatLit)expr).value();
            if (((polyglot.ast.FloatLit)expr).kind() == polyglot.ast.FloatLit.DOUBLE){
                sootExpr = soot.jimple.DoubleConstant.v(-doubleVal);
            }
            else {
                sootExpr = soot.jimple.FloatConstant.v(-(float)doubleVal);
            }
        }
        else {
            // general case: evaluate the operand and wrap in a neg expr
            soot.Value local = base().createExpr(expr);
            soot.jimple.NegExpr negExpr = soot.jimple.Jimple.v().newNegExpr(local);
            sootExpr = negExpr;
            Util.addLnPosTags(negExpr.getOpBox(), expr.position());
        }
        soot.Local retLocal = generateLocal(expr.type());
        soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
        body.getUnits().add(assign);
        Util.addLnPosTags(assign, expr.position());
        return retLocal;
    }
    else if (op == polyglot.ast.Unary.POS) {
        // unary plus is a no-op; still copied into a fresh local
        soot.Local retLocal = generateLocal(expr.type());
        soot.Value sootExpr = base().createExpr(expr);
        soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
        body.getUnits().add(assign);
        Util.addLnPosTags(assign, expr.position());
        return retLocal;
    }
    else if (op == polyglot.ast.Unary.NOT) {
        soot.Value local = base().createExpr(expr);
        if (local instanceof soot.jimple.ConditionExpr){
            local = handleCondBinExpr((soot.jimple.ConditionExpr)local);
        }
        // logical not via branching: if (x != 0) ret = 0 else ret = 1
        soot.jimple.NeExpr neExpr = soot.jimple.Jimple.v().newNeExpr(local, base().getConstant(local.getType(), 0));
        soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
        soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(neExpr, noop1);
        body.getUnits().add(ifStmt);
        Util.addLnPosTags(ifStmt, expr.position());
        Util.addLnPosTags(ifStmt.getConditionBox(), expr.position());
        soot.Local retLocal = lg.generateLocal(local.getType());
        soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, base().getConstant(retLocal.getType(), 1));
        body.getUnits().add(assign1);
        Util.addLnPosTags(assign1, expr.position());
        soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
        soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
        body.getUnits().add(goto1);
        body.getUnits().add(noop1);
        soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, base().getConstant(retLocal.getType(), 0));
        body.getUnits().add(assign2);
        Util.addLnPosTags(assign2, expr.position());
        body.getUnits().add(noop2);
        return retLocal;
    }
    else {
        throw new RuntimeException("Unhandled Unary Expr");
    }
}
/**
 * Returns a numeric constant of the requested value, matching the width of
 * the given Soot type. Anything that is not double/float/long falls back to
 * an int constant.
 */
protected soot.jimple.Constant getConstant(soot.Type type, int val) {
    if (type instanceof soot.DoubleType) {
        return soot.jimple.DoubleConstant.v(val);
    }
    if (type instanceof soot.FloatType) {
        return soot.jimple.FloatConstant.v(val);
    }
    if (type instanceof soot.LongType) {
        return soot.jimple.LongConstant.v(val);
    }
    return soot.jimple.IntConstant.v(val);
}
/**
 * Cast Expression Creation
 *
 * Builds a Jimple cast of the operand to the target type. Casts that are
 * statically redundant (operand already has the target type, or the target
 * is java.lang.Object) are elided and the operand value returned directly.
 */
private soot.Value getCastLocal(polyglot.ast.Cast castExpr){
    // if its already the right type, no conversion needed
    if (castExpr.expr().type().equals(castExpr.type())
            || (castExpr.type().isClass()
                && Util.getSootType(castExpr.type()).toString().equals("java.lang.Object"))) {
        return base().createExpr(castExpr.expr());
    }
    soot.Value operand = base().createExpr(castExpr.expr());
    soot.Type targetType = Util.getSootType(castExpr.type());
    soot.jimple.CastExpr castOp = soot.jimple.Jimple.v().newCastExpr(operand, targetType);
    Util.addLnPosTags(castOp.getOpBox(), castExpr.expr().position());
    soot.Local result = lg.generateLocal(castOp.getCastType());
    soot.jimple.Stmt store = soot.jimple.Jimple.v().newAssignStmt(result, castOp);
    body.getUnits().add(store);
    Util.addLnPosTags(store, castExpr.position());
    return result;
}
/**
 * Procedure Call Helper Methods
 * Returns list of params
 *
 * Evaluates each argument expression of the call into a Jimple value,
 * reifying condition expressions into 0/1-valued locals first.
 */
private ArrayList getSootParams(polyglot.ast.ProcedureCall call) {
    ArrayList args = new ArrayList();
    for (Iterator argIt = call.arguments().iterator(); argIt.hasNext(); ) {
        polyglot.ast.Expr argExpr = (polyglot.ast.Expr)argIt.next();
        soot.Value argVal = base().createExpr(argExpr);
        // condition exprs cannot be passed directly as arguments
        if (argVal instanceof soot.jimple.ConditionExpr){
            argVal = handleCondBinExpr((soot.jimple.ConditionExpr)argVal);
        }
        args.add(argVal);
    }
    return args;
}
/**
 * Returns list of param types
 *
 * Maps the called procedure's formal parameter types to their Soot
 * equivalents, preserving declaration order.
 */
private ArrayList getSootParamsTypes(polyglot.ast.ProcedureCall call) {
    ArrayList formalTypes = new ArrayList();
    for (Iterator typeIt = call.procedureInstance().formalTypes().iterator(); typeIt.hasNext(); ) {
        formalTypes.add(Util.getSootType((polyglot.types.Type)typeIt.next()));
    }
    return formalTypes;
}
/**
 * Gets the Soot Method form the given Soot Class
 *
 * Thin convenience wrapper around Scene.makeMethodRef.
 */
private soot.SootMethodRef getMethodFromClass(soot.SootClass sootClass, String name, ArrayList paramTypes, soot.Type returnType, boolean isStatic) {
    return soot.Scene.v().makeMethodRef(sootClass, name, paramTypes, returnType, isStatic);
}
/**
 * Adds extra params
 *
 * Appends the captured final locals of an anonymous/local class (recorded by
 * the InitialResolver) to the constructor's parameter and parameter-type
 * lists. No-op when there is no capture info for the given type.
 */
private void handleFinalLocalParams(ArrayList sootParams, ArrayList sootParamTypes, polyglot.types.ClassType keyType){
    HashMap finalLocalInfo = soot.javaToJimple.InitialResolver.v().finalLocalInfo();
    if (finalLocalInfo == null) return;
    polyglot.util.IdentityKey key = new polyglot.util.IdentityKey(keyType);
    if (!finalLocalInfo.containsKey(key)) return;
    AnonLocalClassInfo alci = (AnonLocalClassInfo)finalLocalInfo.get(key);
    ArrayList finalLocals = alci.finalLocalsUsed();
    if (finalLocals == null) return;
    for (Iterator localIt = finalLocals.iterator(); localIt.hasNext(); ) {
        polyglot.types.LocalInstance li =
                (polyglot.types.LocalInstance)((polyglot.util.IdentityKey)localIt.next()).object();
        sootParamTypes.add(Util.getSootType(li.type()));
        sootParams.add(getLocal(li));
    }
}
/**
 * Returns a local of the given type referring to "this", delegating to
 * Util.getThis with this builder's body, this-map and local generator.
 */
protected soot.Local getThis(soot.Type sootType){
    return Util.getThis(sootType, body, getThisMap, lg);
}
/**
 * Decides whether constructing the given class requires an implicit
 * outer-class "this" reference: true for anon/local classes declared in an
 * instance context, and for non-static nested (member) classes; false when
 * the type is constructed from within a constructor call.
 */
protected boolean needsOuterClassRef(polyglot.types.ClassType typeToInvoke){
    // anon and local: consult the resolver's capture info
    AnonLocalClassInfo info = (AnonLocalClassInfo)InitialResolver.v().finalLocalInfo().get(new polyglot.util.IdentityKey(typeToInvoke));
    if (InitialResolver.v().isAnonInCCall(typeToInvoke)) {
        return false;
    }
    if (info != null && !info.inStaticMethod()) {
        return true;
    }
    // other nested: non-static member classes
    return typeToInvoke.isNested()
            && !typeToInvoke.flags().isStatic()
            && !typeToInvoke.isAnonymous()
            && !typeToInvoke.isLocal();
}
/**
 * adds outer class params
 *
 * Prepends the implicit outer-class "this" argument (and its type) when the
 * constructed type needs one, and appends the qualifier as an extra argument
 * for qualified anonymous-class creation. (Cleanup: removed an unused local
 * that fetched the resolver's outer-ref list without using it, and merged two
 * else-if branches that had identical bodies; behavior is unchanged.)
 */
private void handleOuterClassParams(ArrayList sootParams, soot.Value qVal, ArrayList sootParamsTypes, polyglot.types.ClassType typeToInvoke){
    boolean addRef = needsOuterClassRef(typeToInvoke);
    if (addRef){
        // if adding an outer type ref always add exact type
        soot.SootClass outerClass = ((soot.RefType)Util.getSootType(typeToInvoke.outer())).getSootClass();
        sootParamsTypes.add(outerClass.getType());
        if (!typeToInvoke.isAnonymous() && (qVal != null)){
            // for nested and local if qualifier use that for param
            sootParams.add(qVal);
        }
        else {
            // otherwise pass the enclosing instance
            sootParams.add(getThis(outerClass.getType()));
        }
    }
    // handle anon qualifiers
    if (typeToInvoke.isAnonymous() && (qVal != null)){
        sootParamsTypes.add(qVal.getType());
        sootParams.add(qVal);
    }
}
/**
 * Constructor Call Creation
 *
 * Translates an explicit this(...) or super(...) call into a specialinvoke
 * of the appropriate <init>, including the implicit outer-class reference
 * and captured-final-local arguments. After a super(...) inside <init>,
 * triggers outer-this, final-local, field and initializer-block handling.
 * (Cleanup: removed an unused local "containerName"; behavior is unchanged.)
 */
private void createConstructorCall(polyglot.ast.ConstructorCall cCall) {
    ArrayList sootParams = new ArrayList();
    ArrayList sootParamsTypes = new ArrayList();
    polyglot.types.ConstructorInstance cInst = cCall.constructorInstance();
    soot.SootClass classToInvoke;
    if (cCall.kind() == polyglot.ast.ConstructorCall.SUPER) {
        classToInvoke = ((soot.RefType)Util.getSootType(cInst.container())).getSootClass();
    }
    else if (cCall.kind() == polyglot.ast.ConstructorCall.THIS) {
        classToInvoke = body.getMethod().getDeclaringClass();
    }
    else {
        throw new RuntimeException("Unknown kind of Constructor Call");
    }
    soot.Local base = specialThisLocal;
    polyglot.types.ClassType objType = (polyglot.types.ClassType)cInst.container();
    soot.Local qVal = null;
    if (cCall.qualifier() != null){
        qVal = (soot.Local)base().createExpr(cCall.qualifier());
    }
    // implicit params first: outer-class ref, then declared args, then captured finals
    handleOuterClassParams(sootParams, qVal, sootParamsTypes, objType);
    sootParams.addAll(getSootParams(cCall));
    sootParamsTypes.addAll(getSootParamsTypes(cCall));
    handleFinalLocalParams(sootParams, sootParamsTypes, (polyglot.types.ClassType)cCall.constructorInstance().container());
    soot.SootMethodRef methodToInvoke = getMethodFromClass(classToInvoke, "<init>", sootParamsTypes, soot.VoidType.v(), false);
    soot.jimple.SpecialInvokeExpr specialInvokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(base, methodToInvoke, sootParams);
    soot.jimple.Stmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(specialInvokeExpr);
    body.getUnits().add(invokeStmt);
    Util.addLnPosTags(invokeStmt, cCall.position());
    // this is clearly broken if an outer class this ref was added as first
    // param
    int numParams = 0;
    Iterator invokeParamsIt = cCall.arguments().iterator();
    while (invokeParamsIt.hasNext()) {
        Util.addLnPosTags(specialInvokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)invokeParamsIt.next()).position());
        numParams++;
    }
    // if method is <init> handle field inits
    if (body.getMethod().getName().equals("<init>") && (cCall.kind() == polyglot.ast.ConstructorCall.SUPER)){
        handleOuterClassThisInit(body.getMethod());
        handleFinalLocalInits();
        handleFieldInits(body.getMethod());
        handleInitializerBlocks(body.getMethod());
    }
}
/**
 * Copies the captured-final-local constructor parameters into their
 * corresponding synthetic fields on "this". The finals are always the
 * trailing parameters, so indexing starts after the declared ones.
 */
private void handleFinalLocalInits(){
    ArrayList finalsList = ((PolyglotMethodSource)body.getMethod().getSource()).getFinalsList();
    if (finalsList == null) return;
    int paramIndex = paramRefCount - finalsList.size();
    for (Iterator fieldIt = finalsList.iterator(); fieldIt.hasNext(); paramIndex++){
        soot.SootField finalField = (soot.SootField)fieldIt.next();
        soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(specialThisLocal, finalField.makeRef());
        soot.jimple.AssignStmt init = soot.jimple.Jimple.v().newAssignStmt(fieldRef, body.getParameterLocal(paramIndex));
        body.getUnits().add(init);
    }
}
/**
 * Local Class Decl - Local Inner Class
 *
 * Records an InnerClass tag on the enclosing class for a class declared
 * locally inside this method body, unless one was already added. (Cleanup:
 * removed an unused local that fetched the resolver's local-class map
 * without using it; behavior is unchanged.)
 */
private void createLocalClassDecl(polyglot.ast.LocalClassDecl cDecl) {
    String name = Util.getSootType(cDecl.decl().type()).toString();
    if (!InitialResolver.v().hasClassInnerTag(body.getMethod().getDeclaringClass(), name)){
        Util.addInnerClassTag(body.getMethod().getDeclaringClass(), name, null, cDecl.decl().name(), Util.getModifier(cDecl.decl().flags()));
    }
}
/**
 * New Expression Creation
 *
 * Translates "new X(...)" (including anonymous class creation) into a
 * Jimple new-expr assignment followed by a specialinvoke of <init>, and
 * returns the local holding the fresh instance. Also records InnerClass
 * tags for anonymous and non-top-level types on first encounter.
 */
private soot.Local getNewLocal(polyglot.ast.New newExpr) {
    // handle parameters/args
    ArrayList sootParams = new ArrayList();
    ArrayList sootParamsTypes = new ArrayList();
    polyglot.types.ClassType objType = (polyglot.types.ClassType)newExpr.objectType().type();
    if (newExpr.anonType() != null){
        // the anon type, not the declared supertype, is what gets instantiated
        objType = newExpr.anonType();
        // add inner class tags for any anon classes created
        String name = Util.getSootType(objType).toString();
        polyglot.types.ClassType outerType = objType.outer();
        if (!InitialResolver.v().hasClassInnerTag(body.getMethod().getDeclaringClass(), name)){
            Util.addInnerClassTag(body.getMethod().getDeclaringClass(), name, null, null, outerType.flags().isInterface() ? soot.Modifier.PUBLIC | soot.Modifier.STATIC : Util.getModifier(objType.flags()));
        }
    }
    else {
        // not an anon class but actually invoking a new something
        if (!objType.isTopLevel()){
            String name = Util.getSootType(objType).toString();
            polyglot.types.ClassType outerType = objType.outer();
            if (!InitialResolver.v().hasClassInnerTag(body.getMethod().getDeclaringClass(), name)){
                Util.addInnerClassTag(body.getMethod().getDeclaringClass(), name, Util.getSootType(outerType).toString(), objType.name(), outerType.flags().isInterface() ? soot.Modifier.PUBLIC | soot.Modifier.STATIC : Util.getModifier(objType.flags()));
            }
        }
    }
    // allocate the object first, then invoke the constructor on it
    soot.RefType sootType = (soot.RefType)Util.getSootType(objType);
    soot.Local retLocal = lg.generateLocal(sootType);
    soot.jimple.NewExpr sootNew = soot.jimple.Jimple.v().newNewExpr(sootType);
    soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootNew);
    body.getUnits().add(stmt);
    Util.addLnPosTags(stmt, newExpr.position());
    Util.addLnPosTags(stmt.getRightOpBox(), newExpr.position());
    soot.SootClass classToInvoke = sootType.getSootClass();
    // if no qualifier --> X to invoke is static
    soot.Value qVal = null;
    if (newExpr.qualifier() != null) {
        qVal = base().createExpr(newExpr.qualifier());
    }
    // implicit params first: outer-class ref, then declared args, then captured finals
    handleOuterClassParams(sootParams, qVal, sootParamsTypes, objType);
    sootParams.addAll(getSootParams(newExpr));
    sootParamsTypes.addAll(getSootParamsTypes(newExpr));
    handleFinalLocalParams(sootParams, sootParamsTypes, (polyglot.types.ClassType)objType);
    soot.SootMethodRef methodToInvoke = getMethodFromClass(classToInvoke, "<init>", sootParamsTypes, soot.VoidType.v(), false);
    soot.jimple.SpecialInvokeExpr specialInvokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(retLocal, methodToInvoke, sootParams);
    soot.jimple.Stmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(specialInvokeExpr);
    body.getUnits().add(invokeStmt);
    Util.addLnPosTags(invokeStmt, newExpr.position());
    // tag each declared argument with its source position
    int numParams = 0;
    Iterator invokeParamsIt = newExpr.arguments().iterator();
    while (invokeParamsIt.hasNext()) {
        Util.addLnPosTags(specialInvokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)invokeParamsIt.next()).position());
        numParams++;
    }
    return retLocal;
}
/**
 * Builds the SootMethodRef for a polyglot call: resolves the receiver class
 * (java.lang.Object for Object receivers and array targets), the Soot
 * return type, and the formal parameter types, then asks the Scene for a
 * method ref with the instance's static flag.
 */
protected soot.SootMethodRef getSootMethodRef(polyglot.ast.Call call){
    soot.Type sootRecType;
    soot.SootClass receiverTypeClass;
    if (Util.getSootType(call.methodInstance().container()).equals(soot.RefType.v("java.lang.Object"))){
        // methods declared on Object always resolve against Object
        sootRecType = soot.RefType.v("java.lang.Object");
        receiverTypeClass = soot.Scene.v().getSootClass("java.lang.Object");
    }
    else {
        // prefer the static target type when one is available
        sootRecType = (call.target().type() == null)
                ? Util.getSootType(call.methodInstance().container())
                : Util.getSootType(call.target().type());
        if (sootRecType instanceof soot.RefType){
            receiverTypeClass = ((soot.RefType)sootRecType).getSootClass();
        }
        else if (sootRecType instanceof soot.ArrayType){
            // array receivers (e.g. clone()) resolve against Object
            receiverTypeClass = soot.Scene.v().getSootClass("java.lang.Object");
        }
        else {
            throw new RuntimeException("call target problem: "+call);
        }
    }
    polyglot.types.MethodInstance methodInstance = call.methodInstance();
    soot.Type sootRetType = Util.getSootType(methodInstance.returnType());
    ArrayList sootParamsTypes = getSootParamsTypes(call);
    return soot.Scene.v().makeMethodRef(receiverTypeClass, methodInstance.name(), sootParamsTypes, sootRetType, methodInstance.flags().isStatic());
}
/**
 * Call Expression Creation
 *
 * Translates a polyglot method call into a Jimple invoke. Chooses the
 * invoke kind (static / interface / special / virtual), reroutes calls that
 * need synthetic accessor methods (private/protected cross-class access)
 * through a generated static accessor, tags argument and base positions,
 * and returns the local holding the result — or null for void calls.
 */
private soot.Local getCallLocal(polyglot.ast.Call call){
    // handle name
    String name = call.name();
    // handle receiver/target
    polyglot.ast.Receiver receiver = call.target();
    soot.Local baseLocal;
    // qualified super calls (Outer.super.m()) are handled separately
    if ((receiver instanceof polyglot.ast.Special) && (((polyglot.ast.Special)receiver).kind() == polyglot.ast.Special.SUPER) && (((polyglot.ast.Special)receiver).qualifier() != null)){
        baseLocal = getSpecialSuperQualifierLocal(call);
        return baseLocal;
    }
    baseLocal = (soot.Local)base().getBaseLocal(receiver);
    ArrayList sootParams = getSootParams(call);
    soot.SootMethodRef callMethod = base().getSootMethodRef(call);
    // recompute the receiver class here (mirrors getSootMethodRef) because
    // the invoke-kind decision below needs receiverTypeClass
    soot.Type sootRecType;
    soot.SootClass receiverTypeClass;
    if (Util.getSootType(call.methodInstance().container()).equals(soot.RefType.v("java.lang.Object"))){
        sootRecType = soot.RefType.v("java.lang.Object");
        receiverTypeClass = soot.Scene.v().getSootClass("java.lang.Object");
    }
    else {
        if (call.target().type() == null){
            sootRecType = Util.getSootType(call.methodInstance().container());
        }
        else {
            sootRecType = Util.getSootType(call.target().type());
        }
        if (sootRecType instanceof soot.RefType){
            receiverTypeClass = ((soot.RefType)sootRecType).getSootClass();
        }
        else if (sootRecType instanceof soot.ArrayType){
            receiverTypeClass = soot.Scene.v().getSootClass("java.lang.Object");
        }
        else {
            throw new RuntimeException("call target problem: "+call);
        }
    }
    polyglot.types.MethodInstance methodInstance = call.methodInstance();
    /*soot.Type sootRetType = Util.getSootType(methodInstance.returnType());
    ArrayList sootParamsTypes = getSootParamsTypes(call);
    ArrayList sootParams = getSootParams(call);
    soot.SootMethodRef callMethod = soot.Scene.v().makeMethodRef(receiverTypeClass, methodInstance.name(), sootParamsTypes, sootRetType, methodInstance.flags().isStatic());*/
    boolean isPrivateAccess = false;
    // cross-class private/protected access goes through a synthetic accessor
    if (needsAccessor(call)){
        soot.SootClass containingClass = ((soot.RefType)Util.getSootType(call.methodInstance().container())).getSootClass();
        soot.SootClass classToAddMethTo = containingClass;
        if (call.methodInstance().flags().isProtected()){
            // for protected access, walk outward to find an enclosing class
            // that can see the containing class's type
            if (InitialResolver.v().hierarchy() == null){
                InitialResolver.v().hierarchy(new soot.FastHierarchy());
            }
            soot.SootClass addToClass;
            if (body.getMethod().getDeclaringClass().hasOuterClass()){
                addToClass = body.getMethod().getDeclaringClass().getOuterClass();
                while (!InitialResolver.v().hierarchy().canStoreType(containingClass.getType(), addToClass.getType())){
                    if (addToClass.hasOuterClass()){
                        addToClass = addToClass.getOuterClass();
                    }
                    else {
                        break;
                    }
                }
            }
            else{
                addToClass = containingClass;
            }
            classToAddMethTo = addToClass;
        }
        callMethod = addGetMethodAccessMeth(classToAddMethTo, call).makeRef();
        if (!call.methodInstance().flags().isStatic()){
            // the accessor is static: the receiver becomes the first argument
            if (call.target() instanceof polyglot.ast.Expr){
                sootParams.add(0, baseLocal);
            }
            else if (body.getMethod().getDeclaringClass().declaresFieldByName("this$0")){
                sootParams.add(0, getThis(Util.getSootType(call.methodInstance().container())));
            }
            else {
                sootParams.add(0, baseLocal);
            }
        }
        isPrivateAccess = true;
    }
    soot.jimple.InvokeExpr invokeExpr;
    if (isPrivateAccess){
        // for accessing private methods in outer class -> always static
        invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(callMethod, sootParams);
    }
    else if (soot.Modifier.isInterface(receiverTypeClass.getModifiers()) && methodInstance.flags().isAbstract()) {
        // if reciever class is interface and method is abstract -> interface
        invokeExpr = soot.jimple.Jimple.v().newInterfaceInvokeExpr(baseLocal, callMethod, sootParams);
    }
    else if (methodInstance.flags().isStatic()){
        // if flag isStatic -> static invoke
        invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(callMethod, sootParams);
    }
    else if (methodInstance.flags().isPrivate()){
        // if flag isPrivate -> special invoke
        invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(baseLocal, callMethod, sootParams);
    }
    else if ((receiver instanceof polyglot.ast.Special) &&
        (((polyglot.ast.Special)receiver).kind() == polyglot.ast.Special.SUPER)){
        // receiver is special super -> special
        invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(baseLocal, callMethod, sootParams);
    }
    else {
        // else virtual invoke
        invokeExpr = soot.jimple.Jimple.v().newVirtualInvokeExpr(baseLocal, callMethod, sootParams);
    }
    // tag each argument with its source position
    int numParams = 0;
    Iterator callParamsIt = call.arguments().iterator();
    while (callParamsIt.hasNext()) {
        Util.addLnPosTags(invokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)callParamsIt.next()).position());
        numParams++;
    }
    if (invokeExpr instanceof soot.jimple.InstanceInvokeExpr) {
        Util.addLnPosTags(((soot.jimple.InstanceInvokeExpr)invokeExpr).getBaseBox(), call.target().position());
    }
    // create an assign stmt so invoke can be used somewhere else
    if (invokeExpr.getMethodRef().returnType().equals(soot.VoidType.v())) {
        soot.jimple.Stmt invoke = soot.jimple.Jimple.v().newInvokeStmt(invokeExpr);
        body.getUnits().add(invoke);
        Util.addLnPosTags(invoke, call.position());
        return null;
    }
    else {
        soot.Local retLocal = lg.generateLocal(invokeExpr.getMethodRef().returnType());
        soot.jimple.Stmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, invokeExpr);
        // add assign stmt to body
        body.getUnits().add(assignStmt);
        Util.addLnPosTags(assignStmt, call.position());
        return retLocal;
    }
}
/**
 * Resolves the receiver of a call to a Jimple value: a dummy typed local
 * for static (TypeNode) targets, otherwise the evaluated expression —
 * copied into a fresh local first if it evaluated to a constant, since
 * constants cannot serve as invoke bases.
 */
protected soot.Value getBaseLocal(polyglot.ast.Receiver receiver) {
    if (receiver instanceof polyglot.ast.TypeNode) {
        return generateLocal(((polyglot.ast.TypeNode)receiver).type());
    }
    soot.Value baseVal = base().createExpr((polyglot.ast.Expr)receiver);
    if (!(baseVal instanceof soot.jimple.Constant)) {
        return baseVal;
    }
    soot.Local constLocal = lg.generateLocal(baseVal.getType());
    soot.jimple.AssignStmt copy = soot.jimple.Jimple.v().newAssignStmt(constLocal, baseVal);
    body.getUnits().add(copy);
    return constLocal;
}
/**
 * NewArray Expression Creation
 *
 * Translates a "new T[...]" expression into a Jimple newarray (single
 * dimension) or newmultiarray (multiple dimensions), tags the size boxes
 * with source positions, applies any array initializer, and returns the
 * local holding the array.
 */
private soot.Local getNewArrayLocal(polyglot.ast.NewArray newArrExpr) {
    soot.Type sootType = Util.getSootType(newArrExpr.type());
    soot.jimple.Expr expr;
    if (newArrExpr.numDims() == 1) {
        soot.Value size;
        if (newArrExpr.additionalDims() == 1) {
            // e.g. "new T[]{...}" style: size comes from the initializer
            size = soot.jimple.IntConstant.v(1);
        }
        else {
            size = base().createExpr((polyglot.ast.Expr)newArrExpr.dims().get(0));
        }
        soot.jimple.NewArrayExpr singleDim = soot.jimple.Jimple.v().newNewArrayExpr(((soot.ArrayType)sootType).getElementType(), size);
        if (newArrExpr.additionalDims() != 1){
            Util.addLnPosTags(singleDim.getSizeBox(), ((polyglot.ast.Expr)newArrExpr.dims().get(0)).position());
        }
        expr = singleDim;
    }
    else {
        // multi-dimensional: evaluate each explicit dimension, then record
        // how many trailing dimensions are left unspecified
        ArrayList sizes = new ArrayList();
        for (Iterator dimIt = newArrExpr.dims().iterator(); dimIt.hasNext(); ){
            sizes.add(base().createExpr((polyglot.ast.Expr)dimIt.next()));
        }
        if (newArrExpr.additionalDims() != 0) {
            sizes.add(soot.jimple.IntConstant.v(newArrExpr.additionalDims()));
        }
        soot.jimple.NewMultiArrayExpr multiDim = soot.jimple.Jimple.v().newNewMultiArrayExpr((soot.ArrayType)sootType, sizes);
        int dimIndex = 0;
        for (Iterator posIt = newArrExpr.dims().iterator(); posIt.hasNext(); dimIndex++){
            Util.addLnPosTags(multiDim.getSizeBox(dimIndex), ((polyglot.ast.Expr)posIt.next()).position());
        }
        expr = multiDim;
    }
    soot.Local retLocal = lg.generateLocal(sootType);
    soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, expr);
    body.getUnits().add(stmt);
    Util.addLnPosTags(stmt, newArrExpr.position());
    Util.addLnPosTags(stmt.getRightOpBox(), newArrExpr.position());
    // handle array init if one exists
    if (newArrExpr.init() != null) {
        soot.Value initVal = getArrayInitLocal(newArrExpr.init(), newArrExpr.type());
        soot.jimple.AssignStmt initStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, initVal);
        body.getUnits().add(initStmt);
    }
    return retLocal;
}
/**
 * create ArrayInit given init and the array local
 *
 * Allocates an array sized to the initializer's element count and fills it
 * element-by-element, recursing into nested ArrayInit elements with the
 * base (element) type of lhsType. Returns the local holding the filled
 * array.
 */
private soot.Local getArrayInitLocal(polyglot.ast.ArrayInit arrInit, polyglot.types.Type lhsType) {
    soot.Local local = generateLocal(lhsType);
    soot.jimple.NewArrayExpr arrExpr = soot.jimple.Jimple.v().newNewArrayExpr(((soot.ArrayType)local.getType()).getElementType(), soot.jimple.IntConstant.v(arrInit.elements().size()));
    soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(local, arrExpr);
    body.getUnits().add(assign);
    Util.addLnPosTags(assign, arrInit.position());
    Iterator it = arrInit.elements().iterator();
    int index = 0;
    while (it.hasNext()){
        polyglot.ast.Expr elemExpr = (polyglot.ast.Expr)it.next();
        soot.Value elem;
        if (elemExpr instanceof polyglot.ast.ArrayInit){
            // nested initializer: recurse with the element type of lhsType
            if (((polyglot.ast.ArrayInit)elemExpr).type() instanceof polyglot.types.NullType) {
                // NOTE(review): nested init with NullType — presumably an
                // untyped "{...}"; recursing with lhsType itself when lhsType
                // is not an ArrayType looks suspicious — confirm intent
                if (lhsType instanceof polyglot.types.ArrayType){
                    elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, ((polyglot.types.ArrayType)lhsType).base());
                }
                else {
                    elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, lhsType);
                }
            }
            else {
                elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, ((polyglot.types.ArrayType)lhsType).base());
            }
        }
        else {
            elem = base().createExpr(elemExpr);
        }
        // local[index] = elem
        soot.jimple.ArrayRef arrRef = soot.jimple.Jimple.v().newArrayRef(local, soot.jimple.IntConstant.v(index));
        soot.jimple.AssignStmt elemAssign = soot.jimple.Jimple.v().newAssignStmt(arrRef, elem);
        body.getUnits().add(elemAssign);
        Util.addLnPosTags(elemAssign, elemExpr.position());
        Util.addLnPosTags(elemAssign.getRightOpBox(), elemExpr.position());
        index++;
    }
    return local;
}
/**
 * Creates a left-hand-side value for an assignment. Only locals, array
 * accesses, and field accesses may appear on the LHS.
 *
 * @param expr the polyglot expression being assigned to
 * @return the soot value to assign into
 * @throws RuntimeException if the expression kind cannot be an LHS
 */
protected soot.Value createLHS(polyglot.ast.Expr expr) {
    if (expr instanceof polyglot.ast.Local) {
        return getLocal((polyglot.ast.Local)expr);
    }
    if (expr instanceof polyglot.ast.ArrayAccess) {
        return getArrayRefLocalLeft((polyglot.ast.ArrayAccess)expr);
    }
    if (expr instanceof polyglot.ast.Field) {
        return getFieldLocalLeft((polyglot.ast.Field)expr);
    }
    throw new RuntimeException("Unhandled LHS");
}
/**
 * Array Ref Expression Creation - LHS. Evaluates base and index and returns
 * the ArrayRef itself (the caller assigns into it), tagging both boxes with
 * source positions.
 *
 * Fix: the previous version generated a result local that was never used,
 * adding a dead local to the body's locals chain.
 *
 * @param arrayRefExpr the polyglot array-access node on the LHS
 * @return the ArrayRef to assign into
 */
private soot.Value getArrayRefLocalLeft(polyglot.ast.ArrayAccess arrayRefExpr) {
    polyglot.ast.Expr array = arrayRefExpr.array();
    polyglot.ast.Expr access = arrayRefExpr.index();
    // evaluate base before index, matching source evaluation order
    soot.Local arrLocal = (soot.Local)base().createExpr(array);
    soot.Value arrAccess = base().createExpr(access);
    soot.jimple.ArrayRef ref = soot.jimple.Jimple.v().newArrayRef(arrLocal, arrAccess);
    Util.addLnPosTags(ref.getBaseBox(), arrayRefExpr.array().position());
    Util.addLnPosTags(ref.getIndexBox(), arrayRefExpr.index().position());
    return ref;
}
/**
 * Array access on the right-hand side: evaluates base and index, builds an
 * ArrayRef, assigns it into a fresh local, and returns that local.
 *
 * @param arrayRefExpr the polyglot array-access node
 * @return the local holding the loaded element
 */
private soot.Value getArrayRefLocal(polyglot.ast.ArrayAccess arrayRefExpr) {
    // evaluate base before index, matching source evaluation order
    soot.Local baseLocal = (soot.Local)base().createExpr(arrayRefExpr.array());
    soot.Value indexVal = base().createExpr(arrayRefExpr.index());
    soot.Local resultLocal = generateLocal(arrayRefExpr.type());
    soot.jimple.ArrayRef elemRef = soot.jimple.Jimple.v().newArrayRef(baseLocal, indexVal);
    Util.addLnPosTags(elemRef.getBaseBox(), arrayRefExpr.array().position());
    Util.addLnPosTags(elemRef.getIndexBox(), arrayRefExpr.index().position());
    soot.jimple.Stmt load = soot.jimple.Jimple.v().newAssignStmt(resultLocal, elemRef);
    body.getUnits().add(load);
    Util.addLnPosTags(load, arrayRefExpr.position());
    return resultLocal;
}
/**
 * Handles a qualified "super" access (e.g. {@code Outer.super.f} or
 * {@code Outer.super.m(...)}): creates a synthetic static access method on
 * the class named by the qualifier and invokes it, since the accessed member
 * lives in the superclass of an outer class and is not directly reachable.
 *
 * @param expr the field access or call whose target is the qualified super
 * @return the local holding the result, or null when the access is void
 * @throws RuntimeException if expr is neither a field access nor a call
 */
private soot.Local getSpecialSuperQualifierLocal(polyglot.ast.Expr expr){
    soot.SootClass classToInvoke;
    ArrayList methodParams = new ArrayList();
    if (expr instanceof polyglot.ast.Call){
        polyglot.ast.Special target = (polyglot.ast.Special)((polyglot.ast.Call)expr).target();
        classToInvoke = ((soot.RefType)Util.getSootType(target.qualifier().type())).getSootClass();
        methodParams = getSootParams((polyglot.ast.Call)expr);
    }
    else if (expr instanceof polyglot.ast.Field){
        polyglot.ast.Special target = (polyglot.ast.Special)((polyglot.ast.Field)expr).target();
        classToInvoke = ((soot.RefType)Util.getSootType(target.qualifier().type())).getSootClass();
    }
    else {
        throw new RuntimeException("Trying to create special super qualifier for: "+expr+" which is not a field or call");
    }
    // make a synthetic static access method on the qualifier's class
    soot.SootMethod methToInvoke = makeSuperAccessMethod(classToInvoke, expr);
    // invoke it, passing the qualifier's "this" as the first argument
    soot.Local classToInvokeLocal = Util.getThis(classToInvoke.getType(), body, getThisMap, lg);
    methodParams.add(0, classToInvokeLocal);
    soot.jimple.InvokeExpr invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(methToInvoke.makeRef(), methodParams);
    // capture the result in a local unless the access method is void
    if (!methToInvoke.getReturnType().equals(soot.VoidType.v())){
        soot.Local retLocal = lg.generateLocal(methToInvoke.getReturnType());
        soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, invokeExpr);
        body.getUnits().add(stmt);
        return retLocal;
    }
    else {
        body.getUnits().add(soot.jimple.Jimple.v().newInvokeStmt(invokeExpr));
        return null;
    }
}
/**
 * Special ("this"/"super") expression: returns the local that represents the
 * receiver, fetching the "this" of a possibly-outer class when qualified.
 *
 * NOTE(review): for qualified super the original author noted this is not
 * sufficient on its own — field/call accesses through a qualified super are
 * routed through getSpecialSuperQualifierLocal, which builds an access
 * method in the outer class's superclass.
 *
 * @param specialExpr the polyglot Special node (this or super)
 * @return the local carrying the requested receiver
 * @throws RuntimeException for an unknown Special kind
 */
private soot.Local getSpecialLocal(polyglot.ast.Special specialExpr) {
    if ((specialExpr.kind() != polyglot.ast.Special.SUPER)
            && (specialExpr.kind() != polyglot.ast.Special.THIS)) {
        throw new RuntimeException("Unknown Special");
    }
    if (specialExpr.qualifier() == null) {
        // unqualified this/super both refer to the current receiver
        return specialThisLocal;
    }
    // qualified: walk to the "this" of the qualifier's type, which may be
    // several enclosing levels up
    return getThis(Util.getSootType(specialExpr.qualifier().type()));
}
/**
 * Creates a synthetic static {@code access$N00} method on classToInvoke that
 * performs the given field read or method call on behalf of an inner class
 * (used for qualified-super accesses). The method takes the target instance
 * as its first parameter, is marked synthetic, and gets its body from a
 * PrivateFieldAccMethodSource / PrivateMethodAccMethodSource.
 *
 * @param classToInvoke  the class the access method is added to
 * @param memberToAccess a polyglot Field or Call node describing the member
 * @return the newly created (and added) access method
 * @throws RuntimeException for any other member kind
 */
private soot.SootMethod makeSuperAccessMethod(soot.SootClass classToInvoke, Object memberToAccess){
    String name = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
    ArrayList paramTypes = new ArrayList();
    paramTypes.add(classToInvoke.getType());
    soot.SootMethod meth;
    soot.MethodSource src;
    if (memberToAccess instanceof polyglot.ast.Field){
        polyglot.ast.Field fieldToAccess = (polyglot.ast.Field)memberToAccess;
        meth = new soot.SootMethod(name, paramTypes, Util.getSootType(fieldToAccess.type()), soot.Modifier.STATIC);
        PrivateFieldAccMethodSource fSrc = new PrivateFieldAccMethodSource(
            Util.getSootType(fieldToAccess.type()),
            fieldToAccess.name(),
            fieldToAccess.flags().isStatic(),
            ((soot.RefType)Util.getSootType(fieldToAccess.target().type())).getSootClass()
        );
        src = fSrc;
    }
    else if (memberToAccess instanceof polyglot.ast.Call){
        polyglot.ast.Call methToAccess = (polyglot.ast.Call)memberToAccess;
        // forward the wrapped method's own parameters after the receiver
        paramTypes.addAll(getSootParamsTypes(methToAccess));
        meth = new soot.SootMethod(name, paramTypes, Util.getSootType(methToAccess.methodInstance().returnType()), soot.Modifier.STATIC);
        PrivateMethodAccMethodSource mSrc = new PrivateMethodAccMethodSource( methToAccess.methodInstance());
        src = mSrc;
    }
    else {
        throw new RuntimeException("trying to access unhandled member type: "+memberToAccess);
    }
    classToInvoke.addMethod(meth);
    meth.setActiveBody(src.getBody(meth, null));
    // mark as compiler-generated
    meth.addTag(new soot.tagkit.SyntheticTag());
    return meth;
}
/**
 * instanceof expression: evaluates the operand, assigns the InstanceOfExpr
 * result into a fresh boolean local, and returns that local.
 *
 * @param instExpr the polyglot instanceof node
 * @return a boolean local holding the test result
 */
private soot.Local getInstanceOfLocal(polyglot.ast.Instanceof instExpr) {
    soot.Type checkType = Util.getSootType(instExpr.compareType().type());
    soot.Value operand = base().createExpr(instExpr.expr());
    soot.jimple.InstanceOfExpr testExpr = soot.jimple.Jimple.v().newInstanceOfExpr(operand, checkType);
    soot.Local result = lg.generateLocal(soot.BooleanType.v());
    soot.jimple.AssignStmt assign = soot.jimple.Jimple.v().newAssignStmt(result, testExpr);
    body.getUnits().add(assign);
    Util.addLnPosTags(assign, instExpr.position());
    Util.addLnPosTags(assign.getRightOpBox(), instExpr.position());
    Util.addLnPosTags(testExpr.getOpBox(), instExpr.expr().position());
    return result;
}
/**
 * Conditional (ternary) expression: lowers {@code cond ? conseq : alt} into
 * an if/goto diamond that assigns one fresh local from whichever branch runs.
 * Structure mirrors createIf and could potentially be merged with it.
 *
 * @param condExpr the polyglot Conditional node
 * @return the local holding the selected branch's value
 */
private soot.Local getConditionalLocal(polyglot.ast.Conditional condExpr){
    // handle cond
    polyglot.ast.Expr condition = condExpr.cond();
    soot.Value sootCond = base().createExpr(condition);
    boolean needIf = needSootIf(sootCond);
    if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
        // non-condition value: branch to the alternative when it equals 0
        sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
    }
    else {
        // the if jumps to the alternative, so the condition is reversed
        sootCond = reverseCondition((soot.jimple.ConditionExpr)sootCond);
        sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
    }
    // noop1 marks the start of the alternative branch
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    if (needIf){
        soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
        body.getUnits().add(ifStmt);
        Util.addLnPosTags(ifStmt, condExpr.position());
        Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
    }
    soot.Local retLocal = generateLocal(condExpr.type());
    // handle consequence
    polyglot.ast.Expr consequence = condExpr.consequent();
    soot.Value conseqVal = base().createExpr(consequence);
    if (conseqVal instanceof soot.jimple.ConditionExpr) {
        conseqVal = handleCondBinExpr((soot.jimple.ConditionExpr)conseqVal);
    }
    soot.jimple.AssignStmt conseqAssignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, conseqVal);
    body.getUnits().add(conseqAssignStmt);
    Util.addLnPosTags(conseqAssignStmt, condExpr.position());
    Util.addLnPosTags(conseqAssignStmt.getRightOpBox(), consequence.position());
    // noop2 marks the end of the whole conditional; skip the alternative
    soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
    body.getUnits().add(goto1);
    // handle alternative
    body.getUnits().add(noop1);
    polyglot.ast.Expr alternative = condExpr.alternative();
    if (alternative != null){
        soot.Value altVal = base().createExpr(alternative);
        if (altVal instanceof soot.jimple.ConditionExpr) {
            altVal = handleCondBinExpr((soot.jimple.ConditionExpr)altVal);
        }
        soot.jimple.AssignStmt altAssignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, altVal);
        body.getUnits().add(altAssignStmt);
        Util.addLnPosTags(altAssignStmt, condExpr.position());
        Util.addLnPosTags(altAssignStmt, alternative.position());
        Util.addLnPosTags(altAssignStmt.getRightOpBox(), alternative.position());
    }
    body.getUnits().add(noop2);
    return retLocal;
}
/**
* Utility methods
*/
/*private boolean isLitOrLocal(polyglot.ast.Expr exp) {
if (exp instanceof polyglot.ast.Lit) return true;
if (exp instanceof polyglot.ast.Local) return true;
else return false;
}*/
/**
 * Generates a fresh compiler-introduced local (not present in the original
 * source) whose soot type corresponds to the given polyglot type.
 *
 * @param polyglotType the polyglot type to convert
 * @return a new local of the converted type, registered with the body
 */
protected soot.Local generateLocal(polyglot.types.Type polyglotType) {
    return lg.generateLocal(Util.getSootType(polyglotType));
}
// Generates a fresh compiler-introduced local of the given soot type.
protected soot.Local generateLocal(soot.Type sootType){
    return lg.generateLocal(sootType);
}
}
|
package soot.javaToJimple;
import java.util.*;
public class JimpleBodyBuilder {
    soot.jimple.JimpleBody body;          // body of the method being created
    ArrayList exceptionTable;             // traps to install on the finished body
    Stack endControlNoop = new Stack();   // nop targets for break, innermost on top
    Stack condControlNoop = new Stack();  // nop targets for continue, innermost on top
    Stack monitorStack;                   // enclosing synchronized blocks (for exits)
    Stack tryStack;                       // enclosing try stmts, in case of returns
    Stack catchStack;                     // enclosing catch stmts, in case of returns
    HashMap labelBreakMap;                // break label --> nop to jump to
    HashMap labelContinueMap;             // continue label --> nop to jump to
    HashMap localsMap = new HashMap();    // polyglot LocalInstance --> soot local
    HashMap getThisMap = new HashMap();   // type --> local holding that type's "this"
    soot.Local specialThisLocal;          // "this" of the method being built
    soot.Local outerClassParamLocal;      // outer class this (non-static inner classes)
    private int paramRefCount = 0;        // count of identity param-ref stmts emitted
    LocalGenerator lg;                    // for generated locals not in orig src
/**
 * Creates and fills the jimple body for sootMethod from its polyglot block:
 * emits identity stmts for this / outer-class this / formals / captured
 * finals, translates the block's statements, runs the {@code <clinit>}
 * specific assert/static-field/static-initializer handling, appends an
 * implicit return for void methods lacking an explicit one, and installs
 * the collected exception traps.
 *
 * Fix: removed the unused local {@code classMod} (declaring-class modifiers
 * were fetched but never read).
 *
 * @param block      the polyglot method body (may be null)
 * @param formals    the polyglot formals, or null if none
 * @param sootMethod the soot method whose active body is created
 * @return the finished, active jimple body
 */
public soot.jimple.JimpleBody createJimpleBody(polyglot.ast.Block block, List formals, soot.SootMethod sootMethod){
    createBody(sootMethod);
    lg = new LocalGenerator(body);
    // create this formal except for static methods
    if (!soot.Modifier.isStatic(sootMethod.getModifiers())) {
        soot.RefType type = sootMethod.getDeclaringClass().getType();
        specialThisLocal = soot.jimple.Jimple.v().newLocal("this", type);
        body.getLocals().add(specialThisLocal);
        soot.jimple.ThisRef thisRef = soot.jimple.Jimple.v().newThisRef(type);
        soot.jimple.Stmt thisStmt = soot.jimple.Jimple.v().newIdentityStmt(specialThisLocal, thisRef);
        body.getUnits().add(thisStmt);
        // no line tags: there is no "this" in the java source to point at
    }
    int formalsCounter = 0;
    // create outer class this param ref for inner classes except for static
    // inner classes - there it is not needed
    int outerIndex = sootMethod.getDeclaringClass().getName().lastIndexOf("$");
    if ((outerIndex != -1) && (sootMethod.getName().equals("<init>")) && sootMethod.getDeclaringClass().declaresFieldByName("this$0")){
        // we know it's an inner non-static class: the outer class comes from
        // the type of the this$0 field
        soot.SootClass outerClass = ((soot.RefType)sootMethod.getDeclaringClass().getFieldByName("this$0").getType()).getSootClass();
        soot.Local outerLocal = lg.generateLocal(outerClass.getType());
        soot.jimple.ParameterRef paramRef = soot.jimple.Jimple.v().newParameterRef(outerClass.getType(), formalsCounter);
        paramRefCount++;
        soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(outerLocal, paramRef);
        body.getUnits().add(stmt);
        ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).setOuterClassThisInit(outerLocal);
        outerClassParamLocal = outerLocal;
        formalsCounter++;
    }
    // handle formals
    if (formals != null) {
        ArrayList formalNames = new ArrayList();
        Iterator formalsIt = formals.iterator();
        while (formalsIt.hasNext()) {
            polyglot.ast.Formal formal = (polyglot.ast.Formal)formalsIt.next();
            createFormal(formal, formalsCounter);
            formalNames.add(formal.name());
            formalsCounter++;
        }
        body.getMethod().addTag(new soot.tagkit.ParamNamesTag(formalNames));
    }
    // handle final locals captured from enclosing scopes (passed as params)
    ArrayList finalsList = ((PolyglotMethodSource)body.getMethod().getSource()).getFinalsList();
    if (finalsList != null){
        Iterator finalsIt = finalsList.iterator();
        while (finalsIt.hasNext()){
            soot.SootField sf = (soot.SootField)finalsIt.next();
            soot.jimple.ParameterRef paramRef = soot.jimple.Jimple.v().newParameterRef(sf.getType(), formalsCounter);
            paramRefCount++;
            soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(lg.generateLocal(sf.getType()), paramRef);
            body.getUnits().add(stmt);
            formalsCounter++;
        }
    }
    createBlock(block);
    // if method is <clinit> handle static field inits
    if (sootMethod.getName().equals("<clinit>")){
        handleAssert(sootMethod);
        handleStaticFieldInits(sootMethod);
        handleStaticInitializerBlocks(sootMethod);
    }
    // determine if body has a return stmt (top-level statements only)
    boolean hasReturn = false;
    if (block != null) {
        Iterator it = block.statements().iterator();
        while (it.hasNext()){
            Object next = it.next();
            if (next instanceof polyglot.ast.Return){
                hasReturn = true;
            }
        }
    }
    soot.Type retType = body.getMethod().getReturnType();
    // only add an implicit return when void and no explicit return exists
    if ((!hasReturn) && (retType instanceof soot.VoidType)) {
        soot.jimple.Stmt retStmt = soot.jimple.Jimple.v().newReturnVoidStmt();
        body.getUnits().add(retStmt);
    }
    // add exceptions from exceptionTable
    if (exceptionTable != null) {
        Iterator trapsIt = exceptionTable.iterator();
        while (trapsIt.hasNext()){
            body.getTraps().add((soot.Trap)trapsIt.next());
        }
    }
    return body;
}
/**
 * Adds assertion-initialisation statements to this body when the method's
 * source contains asserts; no-op otherwise.
 */
private void handleAssert(soot.SootMethod sootMethod){
    soot.javaToJimple.PolyglotMethodSource src =
            (soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource();
    if (src.hasAssert()) {
        src.addAssertInits(body);
    }
}
/**
 * Adds any needed instance-field initializer assignments recorded on the
 * method's source to this (constructor) body.
 */
private void handleFieldInits(soot.SootMethod sootMethod) {
    soot.javaToJimple.PolyglotMethodSource src =
            (soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource();
    ArrayList inits = src.getFieldInits();
    if (inits == null) {
        return;
    }
    handleFieldInits(inits);
}
/**
 * Emits {@code this.<field> = <init>} assignments for each instance-field
 * declaration with an initializer. Array initializers go through
 * getArrayInitLocal; condition expressions are converted to a 0/1 local.
 *
 * @param fieldInits polyglot FieldDecl nodes with non-null initializers
 * @throws RuntimeException if an initializer lowers to something other than
 *                          a local or a constant
 */
protected void handleFieldInits(ArrayList fieldInits){
    Iterator fieldInitsIt = fieldInits.iterator();
    while (fieldInitsIt.hasNext()) {
        polyglot.ast.FieldDecl field = (polyglot.ast.FieldDecl)fieldInitsIt.next();
        String fieldName = field.name();
        polyglot.ast.Expr initExpr = field.init();
        soot.SootClass currentClass = body.getMethod().getDeclaringClass();
        soot.SootField sootField = currentClass.getField(fieldName, Util.getSootType(field.type().type()));
        soot.Local base = specialThisLocal;
        soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(base, sootField);
        soot.Value sootExpr;
        if (initExpr instanceof polyglot.ast.ArrayInit) {
            sootExpr = getArrayInitLocal((polyglot.ast.ArrayInit)initExpr, field.type().type());
        }
        else {
            sootExpr = createExpr(initExpr);
        }
        if (sootExpr instanceof soot.jimple.ConditionExpr) {
            // boolean-valued condition: materialise as a 0/1 local
            sootExpr = handleCondBinExpr((soot.jimple.ConditionExpr)sootExpr);
        }
        soot.jimple.AssignStmt assign;
        if (sootExpr instanceof soot.Local){
            assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, (soot.Local)sootExpr);
        }
        else if (sootExpr instanceof soot.jimple.Constant){
            assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, (soot.jimple.Constant)sootExpr);
        }
        else {
            throw new RuntimeException("fields must assign to local or constant only");
        }
        body.getUnits().add(assign);
        Util.addLnPosTags(assign, initExpr.position());
        Util.addLnPosTags(assign.getRightOpBox(), initExpr.position());
    }
}
/**
 * Stores the outer-class instance (received as a constructor parameter)
 * into the this$0 field. Static inner classes declare no this$0 field, so
 * this is a no-op for them.
 */
private void handleOuterClassThisInit(soot.SootMethod sootMethod) {
    soot.SootClass declClass = body.getMethod().getDeclaringClass();
    if (!declClass.declaresFieldByName("this$0")) {
        return;
    }
    soot.jimple.FieldRef outerRef = soot.jimple.Jimple.v().newInstanceFieldRef(
            specialThisLocal, declClass.getFieldByName("this$0"));
    soot.jimple.AssignStmt store =
            soot.jimple.Jimple.v().newAssignStmt(outerRef, outerClassParamLocal);
    body.getUnits().add(store);
}
/**
 * Emits static-field initializer assignments into this (<clinit>) body.
 * Array initializers go through getArrayInitLocal.
 */
private void handleStaticFieldInits(soot.SootMethod sootMethod) {
    ArrayList staticFieldInits = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getStaticFieldInits();
    if (staticFieldInits != null) {
        Iterator staticFieldInitsIt = staticFieldInits.iterator();
        while (staticFieldInitsIt.hasNext()) {
            polyglot.ast.FieldDecl field = (polyglot.ast.FieldDecl)staticFieldInitsIt.next();
            String fieldName = field.name();
            polyglot.ast.Expr initExpr = field.init();
            soot.SootClass currentClass = body.getMethod().getDeclaringClass();
            soot.SootField sootField = currentClass.getField(fieldName, Util.getSootType(field.type().type()));
            soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(sootField);
            soot.Value sootExpr;
            if (initExpr instanceof polyglot.ast.ArrayInit) {
                sootExpr = getArrayInitLocal((polyglot.ast.ArrayInit)initExpr, field.type().type());
            }
            else {
                sootExpr = createExpr(initExpr);
            }
            // NOTE(review): unlike handleFieldInits, a ConditionExpr result is
            // not wrapped via handleCondBinExpr here — confirm whether a
            // boolean-condition initializer can reach this point
            soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(fieldRef, sootExpr);
            body.getUnits().add(assign);
            Util.addLnPosTags(assign, initExpr.position());
        }
    }
}
/**
 * Instance initializer blocks are emitted inside init methods in jimple;
 * translates any recorded on the method's source into this body.
 */
private void handleInitializerBlocks(soot.SootMethod sootMethod) {
    ArrayList blocks = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getInitializerBlocks();
    if (blocks != null) {
        handleStaticBlocks(blocks);
    }
}
/**
 * Translates each polyglot initializer block in the list into this body.
 */
protected void handleStaticBlocks(ArrayList initializerBlocks){
    for (Iterator it = initializerBlocks.iterator(); it.hasNext(); ) {
        createBlock((polyglot.ast.Block)it.next());
    }
}
/**
 * Static initializer blocks are emitted inside the {@code <clinit>} method
 * in jimple; translates any recorded on the method's source into this body.
 *
 * Fix: delegates to handleStaticBlocks instead of duplicating its loop,
 * keeping the two paths consistent.
 */
private void handleStaticInitializerBlocks(soot.SootMethod sootMethod) {
    ArrayList staticInitializerBlocks = ((soot.javaToJimple.PolyglotMethodSource)sootMethod.getSource()).getStaticInitializerBlocks();
    if (staticInitializerBlocks != null) {
        handleStaticBlocks(staticInitializerBlocks);
    }
}
/**
 * Creates a fresh jimple body for sootMethod, stores it in the {@code body}
 * field, and makes it the method's active body.
 */
private void createBody(soot.SootMethod sootMethod) {
    body = soot.jimple.Jimple.v().newBody(sootMethod);
    sootMethod.setActiveBody(body);
}
/**
 * Translates every statement of a polyglot block into this body. A null
 * block (e.g. an absent method body) is silently ignored.
 *
 * @throws RuntimeException if a block child is not a Stmt
 */
private void createBlock(polyglot.ast.Block block){
    if (block == null) {
        return;
    }
    for (Iterator it = block.statements().iterator(); it.hasNext(); ) {
        Object child = it.next();
        if (!(child instanceof polyglot.ast.Stmt)) {
            throw new RuntimeException("Unexpected - Unhandled Node");
        }
        createStmt((polyglot.ast.Stmt)child);
    }
}
/**
 * Catch Formal creation: binds the caught exception to the catch clause's
 * formal via an identity stmt with a CaughtExceptionRef, tagging the stmt
 * with the formal's name and position.
 *
 * Fix: removed the unused local {@code sootType} (the formal's soot type was
 * computed but never used).
 *
 * @param formal the catch clause's formal
 * @return the local bound to the caught exception
 */
private soot.Local createCatchFormal(polyglot.ast.Formal formal){
    soot.Local formalLocal = createLocal(formal.localInstance());
    soot.jimple.CaughtExceptionRef exceptRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
    soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(formalLocal, exceptRef);
    body.getUnits().add(stmt);
    Util.addLnPosTags(stmt, formal.position());
    Util.addLnPosTags(((soot.jimple.IdentityStmt) stmt).getRightOpBox(), formal.position());
    ArrayList names = new ArrayList();
    names.add(formal.name());
    stmt.addTag(new soot.tagkit.ParamNamesTag(names));
    return formalLocal;
}
/**
 * Formal creation: emits the identity stmt binding a method parameter to
 * its local, tagged with the formal's source position.
 *
 * @param formal  the polyglot formal
 * @param counter the parameter's position in the signature
 */
private void createFormal(polyglot.ast.Formal formal, int counter){
    soot.Type paramType = Util.getSootType(formal.type().type());
    soot.Local paramLocal = createLocal(formal.localInstance());
    soot.jimple.ParameterRef paramRef = soot.jimple.Jimple.v().newParameterRef(paramType, counter);
    paramRefCount++;
    soot.jimple.Stmt identity = soot.jimple.Jimple.v().newIdentityStmt(paramLocal, paramRef);
    body.getUnits().add(identity);
    Util.addLnPosTags(((soot.jimple.IdentityStmt) identity).getRightOpBox(), formal.position());
    Util.addLnPosTags(identity, formal.position());
}
/**
 * Literal Creation: maps a polyglot literal node to the corresponding jimple
 * constant. Char and boolean literals become IntConstants (their jimple
 * representation); class literals are materialised through a generated
 * helper.
 *
 * Fix: removed an unused {@code double litValue} local in the FloatLit
 * branch (the value was re-read from the node anyway).
 *
 * @param lit the polyglot literal node
 * @return the jimple constant (or, for class literals, a local)
 * @throws RuntimeException for literal kinds not handled here
 */
private soot.Value createLiteral(polyglot.ast.Lit lit) {
    if (lit instanceof polyglot.ast.IntLit) {
        polyglot.ast.IntLit intLit = (polyglot.ast.IntLit)lit;
        long litValue = intLit.value();
        if (intLit.kind() == polyglot.ast.IntLit.INT) {
            return soot.jimple.IntConstant.v((int)litValue);
        }
        else {
            return soot.jimple.LongConstant.v(litValue);
        }
    }
    else if (lit instanceof polyglot.ast.StringLit) {
        String litValue = ((polyglot.ast.StringLit)lit).value();
        return soot.jimple.StringConstant.v(litValue);
    }
    else if (lit instanceof polyglot.ast.NullLit) {
        return soot.jimple.NullConstant.v();
    }
    else if (lit instanceof polyglot.ast.FloatLit) {
        polyglot.ast.FloatLit floatLit = (polyglot.ast.FloatLit)lit;
        if (floatLit.kind() == polyglot.ast.FloatLit.DOUBLE) {
            return soot.jimple.DoubleConstant.v(floatLit.value());
        }
        else {
            return soot.jimple.FloatConstant.v((float)(floatLit.value()));
        }
    }
    else if (lit instanceof polyglot.ast.CharLit) {
        // chars are represented as ints in jimple
        char litValue = ((polyglot.ast.CharLit)lit).value();
        return soot.jimple.IntConstant.v(litValue);
    }
    else if (lit instanceof polyglot.ast.BooleanLit) {
        // booleans are represented as 0/1 ints in jimple
        boolean litValue = ((polyglot.ast.BooleanLit)lit).value();
        if (litValue) return soot.jimple.IntConstant.v(1);
        else return soot.jimple.IntConstant.v(0);
    }
    else if (lit instanceof polyglot.ast.ClassLit){
        return getSpecialClassLitLocal((polyglot.ast.ClassLit)lit);
    }
    else {
        throw new RuntimeException("Unknown Literal - Unhandled: "+lit.getClass());
    }
}
/**
 * Local Creation for polyglot locals and formals: creates the soot local
 * and records the LocalInstance -> local mapping for later lookups.
 *
 * @param localInst the polyglot local instance
 * @return the newly created soot local
 */
private soot.Local createLocal(polyglot.types.LocalInstance localInst) {
    soot.Local sootLocal = createLocal(localInst.name(), Util.getSootType(localInst.type()));
    localsMap.put(new polyglot.util.IdentityKey(localInst), sootLocal);
    return sootLocal;
}
// This should be used for generated locals only: makes a jimple local with
// the given name/type and registers it with the body (no LocalInstance map
// entry, unlike the polyglot-local overload).
private soot.Local createLocal(String name, soot.Type sootType) {
    soot.Local sootLocal = soot.jimple.Jimple.v().newLocal(name, sootType);
    body.getLocals().add(sootLocal);
    return sootLocal;
}
/**
 * Local Retrieval: looks up the soot local for a polyglot local node by
 * delegating to the LocalInstance overload.
 */
private soot.Local getLocal(polyglot.ast.Local local) {
    return getLocal(local.localInstance());
}
/**
 * Local Retrieval: resolves a polyglot LocalInstance to a usable soot local.
 * Three cases: (1) a local already mapped in localsMap; (2) a final local
 * captured by this (anonymous/local) class as a val$&lt;name&gt; field — read it
 * into a fresh local; (3) a final local captured by some enclosing class —
 * walk the this$0 chain to find the declaring outer class, generate an
 * access method there, and invoke it.
 *
 * @param li the polyglot local instance to resolve
 * @return a soot local carrying the value
 * @throws RuntimeException if no enclosing class declares the captured field
 */
private soot.Local getLocal(polyglot.types.LocalInstance li) {
    if (localsMap.containsKey(new polyglot.util.IdentityKey(li))){
        // ordinary local of this method
        soot.Local sootLocal = (soot.Local)localsMap.get(new polyglot.util.IdentityKey(li));
        return sootLocal;
    }
    else if (body.getMethod().getDeclaringClass().declaresField("val$"+li.name(), Util.getSootType(li.type()))){
        // captured final local stored on this class: load the field
        soot.Local fieldLocal = generateLocal(li.type());
        soot.SootField field = body.getMethod().getDeclaringClass().getField("val$"+li.name(), Util.getSootType(li.type()));
        soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(specialThisLocal, field);
        soot.jimple.AssignStmt assign = soot.jimple.Jimple.v().newAssignStmt(fieldLocal, fieldRef);
        body.getUnits().add(assign);
        return fieldLocal;
    }
    else {
        // else create access method in an outer class for val$<name>;
        // the this$0 field gives the outer class type - it must exist
        // because local/anonymous inner classes can't declare static
        // members, so deep nesting is never in a static context here
        soot.SootClass currentClass = body.getMethod().getDeclaringClass();
        boolean fieldFound = false;
        while (!fieldFound){
            if (!currentClass.declaresFieldByName("this$0")){
                throw new RuntimeException("Trying to get field val$"+li.name()+" from some outer class but can't access the outer class of: "+currentClass.getName()+"!");
            }
            soot.SootClass outerClass = ((soot.RefType)currentClass.getFieldByName("this$0").getType()).getSootClass();
            // look for a field named val$<name> with li's type in the outer class
            if (outerClass.declaresField("val$"+li.name(), Util.getSootType(li.type()))){
                fieldFound = true;
            }
            currentClass = outerClass;
            // repeat until found in some outer class
        }
        // create and add an accessor to the declaring outer class
        // (currentClass now refers to it)
        soot.SootMethod methToInvoke = makeLiFieldAccessMethod(currentClass, li);
        // invoke it with that class's "this" and return the result local
        ArrayList methParams = new ArrayList();
        methParams.add(getThis(currentClass.getType()));
        soot.Local res = Util.getPrivateAccessFieldInvoke(methToInvoke, methParams, body, lg);
        return res;
    }
}
/**
 * Creates a synthetic static {@code access$N00} method on classToInvoke that
 * returns the captured-final-local field val$&lt;name&gt; of the given instance.
 *
 * @param classToInvoke the outer class declaring the val$ field
 * @param li            the captured local instance
 * @return the newly created (and added) access method
 */
private soot.SootMethod makeLiFieldAccessMethod(soot.SootClass classToInvoke, polyglot.types.LocalInstance li){
    String methName = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
    ArrayList params = new ArrayList();
    params.add(classToInvoke.getType());
    soot.SootMethod accessMeth = new soot.SootMethod(methName, params, Util.getSootType(li.type()), soot.Modifier.STATIC);
    classToInvoke.addMethod(accessMeth);
    PrivateFieldAccMethodSource accSrc = new PrivateFieldAccMethodSource();
    accSrc.fieldName("val$"+li.name());
    accSrc.fieldType(Util.getSootType(li.type()));
    accSrc.classToInvoke(classToInvoke);
    accessMeth.setActiveBody(accSrc.getBody(accessMeth, null));
    return accessMeth;
}
/**
 * Stmt creation: dispatches a polyglot statement node to the matching
 * create* translator. Empty statements are ignored.
 *
 * Fix: the unhandled case previously printed the class to stdout and then
 * threw a message-less exception; the class name is now part of the
 * exception message.
 *
 * @param stmt the polyglot statement to translate
 * @throws RuntimeException for statement kinds not handled here
 */
private void createStmt(polyglot.ast.Stmt stmt) {
    if (stmt instanceof polyglot.ast.Eval) {
        createExpr(((polyglot.ast.Eval)stmt).expr());
    }
    else if (stmt instanceof polyglot.ast.If) {
        createIf((polyglot.ast.If)stmt);
    }
    else if (stmt instanceof polyglot.ast.LocalDecl) {
        createLocalDecl((polyglot.ast.LocalDecl)stmt);
    }
    else if (stmt instanceof polyglot.ast.Block) {
        createBlock((polyglot.ast.Block)stmt);
    }
    else if (stmt instanceof polyglot.ast.While) {
        createWhile((polyglot.ast.While)stmt);
    }
    else if (stmt instanceof polyglot.ast.Do) {
        createDo((polyglot.ast.Do)stmt);
    }
    else if (stmt instanceof polyglot.ast.For) {
        createForLoop((polyglot.ast.For)stmt);
    }
    else if (stmt instanceof polyglot.ast.Switch) {
        createSwitch((polyglot.ast.Switch)stmt);
    }
    else if (stmt instanceof polyglot.ast.Return) {
        createReturn((polyglot.ast.Return)stmt);
    }
    else if (stmt instanceof polyglot.ast.Branch) {
        createBranch((polyglot.ast.Branch)stmt);
    }
    else if (stmt instanceof polyglot.ast.ConstructorCall) {
        createConstructorCall((polyglot.ast.ConstructorCall)stmt);
    }
    else if (stmt instanceof polyglot.ast.Empty) {
        // do nothing for an empty stmt
    }
    else if (stmt instanceof polyglot.ast.Throw) {
        createThrow((polyglot.ast.Throw)stmt);
    }
    else if (stmt instanceof polyglot.ast.Try) {
        createTry((polyglot.ast.Try)stmt);
    }
    else if (stmt instanceof polyglot.ast.Labeled) {
        createLabeled((polyglot.ast.Labeled)stmt);
    }
    else if (stmt instanceof polyglot.ast.Synchronized) {
        createSynchronized((polyglot.ast.Synchronized)stmt);
    }
    else if (stmt instanceof polyglot.ast.Assert) {
        createAssert((polyglot.ast.Assert)stmt);
    }
    else if (stmt instanceof polyglot.ast.LocalClassDecl) {
        createLocalClassDecl((polyglot.ast.LocalClassDecl)stmt);
    }
    else {
        throw new RuntimeException("Unhandled Stmt: "+stmt.getClass().toString());
    }
}
/**
 * Decides whether a jimple if-stmt is needed for the given condition value.
 * The only case where it is not is the constant 1 (i.e. literal true), in
 * which case the branch is taken unconditionally.
 */
private boolean needSootIf(soot.Value sootCond){
    return !((sootCond instanceof soot.jimple.IntConstant)
            && (((soot.jimple.IntConstant)sootCond).value == 1));
}
/**
 * If Stmts Creation: lowers if/else into a reversed-condition jump over the
 * consequent (noop1 marks the alternative, noop2 the end). Only the if-stmt
 * itself gets line-number tags here; statements inside the branches are
 * tagged where they are created.
 */
private void createIf(polyglot.ast.If ifExpr){
    // handle cond
    polyglot.ast.Expr condition = ifExpr.cond();
    soot.Value sootCond = createExpr(condition);
    boolean needIf = needSootIf(sootCond);
    if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
        // non-condition value: jump to the alternative when it equals 0
        sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
    }
    else {
        // the if jumps PAST the consequent, so the condition is reversed
        sootCond = reverseCondition((soot.jimple.ConditionExpr)sootCond);
        sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
    }
    // add if
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    if (needIf) {
        soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
        body.getUnits().add(ifStmt);
        // add line and pos tags
        Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
        Util.addLnPosTags(ifStmt, condition.position());
    }
    // add consequence
    polyglot.ast.Stmt consequence = ifExpr.consequent();
    createStmt(consequence);
    // skip the alternative after the consequent runs
    soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
    body.getUnits().add(goto1);
    body.getUnits().add(noop1);
    // handle alternative
    polyglot.ast.Stmt alternative = ifExpr.alternative();
    if (alternative != null){
        createStmt(alternative);
    }
    body.getUnits().add(noop2);
}
/**
 * While Stmts Creation: emitted in test-at-bottom form — jump to the
 * condition first (noop2), then the body (noop1), then the condition which
 * jumps back to the body while true. Break/continue targets are pushed on
 * endControlNoop/condControlNoop for the duration of the loop.
 */
private void createWhile(polyglot.ast.While whileStmt){
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    // these are for break and continue
    endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
    condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
    // handle body: enter at the condition, not the body
    soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
    body.getUnits().add(goto1);
    body.getUnits().add(noop1);
    createStmt(whileStmt.body());
    body.getUnits().add(noop2);
    // handle cond: continue jumps land here, just before the test
    body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
    polyglot.ast.Expr condition = whileStmt.cond();
    soot.Value sootCond = createExpr(condition);
    boolean needIf = needSootIf(sootCond);
    if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
        // loop back while the value is non-zero
        sootCond = soot.jimple.Jimple.v().newNeExpr(sootCond, soot.jimple.IntConstant.v(0));
    }
    else {
        sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
    }
    if (needIf){
        soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
        body.getUnits().add(ifStmt);
        Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
        Util.addLnPosTags(ifStmt, condition.position());
    }
    else {
        // condition is constant true: loop back unconditionally
        soot.jimple.GotoStmt gotoIf = soot.jimple.Jimple.v().newGotoStmt(noop1);
        body.getUnits().add(gotoIf);
    }
    // break jumps land here, after the loop
    body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
 * DoWhile Stmts Creation: body first (noop1 marks its start), then the
 * continue target, then the condition which jumps back to the body while
 * true. Break/continue targets are pushed on endControlNoop/condControlNoop
 * for the duration of the loop.
 *
 * Fix: the condition box now gets line+position tags via Util.addLnPosTags,
 * consistent with createIf/createWhile/createForLoop (previously only a
 * position tag was added, losing the line numbers).
 */
private void createDo(polyglot.ast.Do doStmt){
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(noop1);
    // these are for break and continue
    endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
    condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
    // handle body
    createStmt(doStmt.body());
    // handle cond: continue jumps land here, just before the test
    body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
    polyglot.ast.Expr condition = doStmt.cond();
    soot.Value sootCond = createExpr(condition);
    boolean needIf = needSootIf(sootCond);
    if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
        // loop back while the value is non-zero
        sootCond = soot.jimple.Jimple.v().newNeExpr(sootCond, soot.jimple.IntConstant.v(0));
    }
    else {
        sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
    }
    if (needIf){
        soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
        body.getUnits().add(ifStmt);
        Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
        Util.addLnPosTags(ifStmt, condition.position());
    }
    else {
        // condition is constant true: loop back unconditionally
        soot.jimple.GotoStmt gotoIf = soot.jimple.Jimple.v().newGotoStmt(noop1);
        body.getUnits().add(gotoIf);
    }
    // break jumps land here, after the loop
    body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
 * For Loop Stmts Creation: inits, then test-at-bottom layout — jump to the
 * condition (noop2), body (noop1), continue target, iteration exprs, then
 * the condition which jumps back to the body while true (or unconditionally
 * when there is no condition). Break/continue targets live on
 * endControlNoop/condControlNoop for the duration of the loop.
 */
private void createForLoop(polyglot.ast.For forStmt){
    // these are for break and continue
    endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
    condControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
    // handle for inits
    Iterator initsIt = forStmt.inits().iterator();
    while (initsIt.hasNext()){
        createStmt((polyglot.ast.Stmt)initsIt.next());
    }
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    // handle body: enter at the condition, not the body
    soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
    body.getUnits().add(goto1);
    body.getUnits().add(noop1);
    createStmt(forStmt.body());
    // handle continue: continue jumps land here, before the iters
    body.getUnits().add((soot.jimple.Stmt)(condControlNoop.pop()));
    // handle iters
    Iterator itersIt = forStmt.iters().iterator();
    while (itersIt.hasNext()){
        createStmt((polyglot.ast.Stmt)itersIt.next());
    }
    body.getUnits().add(noop2);
    // handle cond
    polyglot.ast.Expr condition = forStmt.cond();
    if (condition != null) {
        soot.Value sootCond = createExpr(condition);
        boolean needIf = needSootIf(sootCond);
        if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
            // loop back while the value is non-zero
            sootCond = soot.jimple.Jimple.v().newNeExpr(sootCond, soot.jimple.IntConstant.v(0));
        }
        else {
            sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
        }
        if (needIf){
            soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
            // add cond
            body.getUnits().add(ifStmt);
            // add line and pos tags
            Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
            Util.addLnPosTags(ifStmt, condition.position());
        }
        else {
            // condition is constant true: loop back unconditionally
            soot.jimple.GotoStmt gotoIf = soot.jimple.Jimple.v().newGotoStmt(noop1);
            body.getUnits().add(gotoIf);
        }
    }
    else {
        // no condition at all: infinite loop back to the body
        soot.jimple.Stmt goto2 = soot.jimple.Jimple.v().newGotoStmt(noop1);
        body.getUnits().add(goto2);
    }
    // break jumps land here, after the loop
    body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
 * Local Decl Creation
 *
 * Generates an assign stmt for a local variable declaration with an
 * initializer; a declaration without an initializer emits no units
 * (the local itself is obtained via createLocal).
 */
private void createLocalDecl(polyglot.ast.LocalDecl localDecl) {
    String name = localDecl.name();
    polyglot.types.LocalInstance localInst = localDecl.localInstance();
    soot.Value lhs = createLocal(localInst);
    polyglot.ast.Expr expr = localDecl.init();
    if (expr != null) {
        soot.Value rhs;
        if (expr instanceof polyglot.ast.ArrayInit){
            // array initializers are created specially (element-wise stores)
            rhs = getArrayInitLocal((polyglot.ast.ArrayInit)expr, localInst.type());
        }
        else {
            rhs = createExpr(expr);
        }
        if (rhs instanceof soot.jimple.ConditionExpr) {
            // a bare condition cannot be an assign rhs; materialize it as 0/1
            rhs = handleCondBinExpr((soot.jimple.ConditionExpr)rhs);
        }
        soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
        body.getUnits().add(stmt);
        Util.addLineTag(stmt, localDecl);
        Util.addLnPosTags(stmt, localDecl.position());
        // this is a special case for position tags
        if (localDecl.position() != null){
            // lhs box: the variable name sits at the end of the decl's position
            Util.addLnPosTags(stmt.getLeftOpBox(), localDecl.position().line(), localDecl.position().endLine(), localDecl.position().endColumn()-name.length(), localDecl.position().endColumn());
            // expr is known non-null here (guarded above), so the stmt span
            // always extends through the end of the initializer
            Util.addLnPosTags(stmt, localDecl.position().line(), expr.position().endLine(), localDecl.position().column(), expr.position().endColumn());
        }
        Util.addLnPosTags(stmt.getRightOpBox(), expr.position());
    }
}
/**
 * Switch Stmts Creation
 *
 * Emits a LookupSwitchStmt or TableSwitchStmt (chosen by isLookupSwitch),
 * followed by the case bodies in their source order. A nop is created as
 * the jump target for every case label; the targets handed to the switch
 * stmt are sorted ascending by case value, while the bodies keep source
 * order. The endControlNoop stack supplies the break target.
 */
private void createSwitch(polyglot.ast.Switch switchStmt) {
    polyglot.ast.Expr value = switchStmt.expr();
    soot.Value sootValue = createExpr(value);
    soot.jimple.Stmt defaultTarget = null;
    // one slot per switch element; only non-default Case elements are filled
    polyglot.ast.Case [] caseArray = new polyglot.ast.Case[switchStmt.elements().size()];
    soot.jimple.Stmt [] targetsArray = new soot.jimple.Stmt[switchStmt.elements().size()];
    ArrayList targets = new ArrayList();
    HashMap targetsMap = new HashMap();
    int counter = 0;
    Iterator it = switchStmt.elements().iterator();
    // first pass: create a nop target for every case label
    while (it.hasNext()) {
        Object next = it.next();
        if (next instanceof polyglot.ast.Case) {
            soot.jimple.Stmt noop = soot.jimple.Jimple.v().newNopStmt();
            if (!((polyglot.ast.Case)next).isDefault()){
                targets.add(noop);
                caseArray[counter] = (polyglot.ast.Case)next;
                targetsArray[counter] = noop;
                counter++;
                targetsMap.put(next, noop);
            }
            else {
                defaultTarget = noop;
            }
        }
    }
    // sort cases (and their targets, in lockstep) ascending by case value
    for (int i = 0; i < counter; i++) {
        for (int j = i+1; j < counter; j++) {
            if (caseArray[j].value() < caseArray[i].value()) {
                polyglot.ast.Case tempCase = caseArray[i];
                soot.jimple.Stmt tempTarget = targetsArray[i];
                caseArray[i] = caseArray[j];
                targetsArray[i] = targetsArray[j];
                caseArray[j] = tempCase;
                targetsArray[j] = tempTarget;
            }
        }
    }
    ArrayList sortedTargets = new ArrayList();
    for (int i = 0; i < counter; i++) {
        sortedTargets.add(targetsArray[i]);
    }
    // deal with default: if absent, a synthetic target right before the
    // end of the switch construct stands in for it
    boolean hasDefaultTarget = true;
    if (defaultTarget == null) {
        soot.jimple.Stmt noop = soot.jimple.Jimple.v().newNopStmt();
        defaultTarget = noop;
        hasDefaultTarget = false;
    }
    // lookup or tableswitch
    soot.jimple.Stmt sootSwitchStmt;
    if (isLookupSwitch(switchStmt)) {
        // lookup keys are taken from the already-sorted case array so they
        // line up with sortedTargets
        ArrayList values = new ArrayList();
        for (int i = 0; i < counter; i++) {
            if (!caseArray[i].isDefault()) {
                values.add(soot.jimple.IntConstant.v((int)caseArray[i].value()));
            }
        }
        soot.jimple.LookupSwitchStmt lookupStmt = soot.jimple.Jimple.v().newLookupSwitchStmt(sootValue, values, sortedTargets, defaultTarget);
        Util.addLnPosTags(lookupStmt.getKeyBox(), value.position());
        sootSwitchStmt = lookupStmt;
    }
    else {
        // table switch: compute min/max case values for the table bounds
        long lowVal = 0;
        long highVal = 0;
        boolean unknown = true;
        it = switchStmt.elements().iterator();
        while (it.hasNext()){
            Object next = it.next();
            if (next instanceof polyglot.ast.Case) {
                if (!((polyglot.ast.Case)next).isDefault()){
                    long temp = ((polyglot.ast.Case)next).value();
                    if (unknown){
                        highVal = temp;
                        lowVal = temp;
                        unknown = false;
                    }
                    if (temp > highVal) {
                        highVal = temp;
                    }
                    if (temp < lowVal) {
                        lowVal = temp;
                    }
                }
            }
        }
        soot.jimple.TableSwitchStmt tableStmt = soot.jimple.Jimple.v().newTableSwitchStmt(sootValue, (int)lowVal, (int)highVal, sortedTargets, defaultTarget);
        Util.addLnPosTags(tableStmt.getKeyBox(), value.position());
        sootSwitchStmt = tableStmt;
    }
    body.getUnits().add(sootSwitchStmt);
    Util.addLnPosTags(sootSwitchStmt, switchStmt.position());
    // break target for this switch
    endControlNoop.push(soot.jimple.Jimple.v().newNopStmt());
    // second pass: emit each case's target nop followed by its stmts,
    // in source order (fall-through preserved)
    it = switchStmt.elements().iterator();
    while (it.hasNext()){
        Object next = it.next();
        if (next instanceof polyglot.ast.Case) {
            if (!((polyglot.ast.Case)next).isDefault()){
                body.getUnits().add((soot.jimple.Stmt)targetsMap.get(next));
            }
            else {
                body.getUnits().add(defaultTarget);
            }
        }
        else {
            polyglot.ast.SwitchBlock blockStmt = (polyglot.ast.SwitchBlock)next;
            createBlock(blockStmt);
        }
    }
    // a switch without a default jumps to just before the end
    if (!hasDefaultTarget) {
        body.getUnits().add(defaultTarget);
    }
    body.getUnits().add((soot.jimple.Stmt)(endControlNoop.pop()));
}
/**
 * Determine if switch should be lookup or table - this doesn't
 * always get the same result as javac.
 * Returns true (lookup) unless the case values form a gap-free
 * sequential range (table).
 */
private boolean isLookupSwitch(polyglot.ast.Switch switchStmt){
    int min = 0;
    int max = 0;
    int numCases = 0;
    for (Iterator elems = switchStmt.elements().iterator(); elems.hasNext(); ) {
        Object elem = elems.next();
        if (!(elem instanceof polyglot.ast.Case)) continue;
        polyglot.ast.Case caseStmt = (polyglot.ast.Case)elem;
        if (caseStmt.isDefault()) continue;
        int caseValue = (int)caseStmt.value();
        if (numCases == 0) {
            // first label seeds both bounds
            min = caseValue;
            max = caseValue;
        }
        else {
            if (caseValue < min) min = caseValue;
            if (caseValue > max) max = caseValue;
        }
        numCases++;
    }
    // a dense range of N labels spans exactly max - min == N - 1
    return (numCases - 1) != (max - min);
}
/**
 * Branch Stmts Creation
 *
 * Handles break and continue, labelled and unlabelled. Unlabelled branches
 * jump to the innermost enclosing targets held on the endControlNoop
 * (break) and condControlNoop (continue) stacks; labelled branches look up
 * their target nop in labelBreakMap/labelContinueMap (populated by
 * createLabeled).
 */
private void createBranch(polyglot.ast.Branch branchStmt){
    // marker nop preceding the goto
    body.getUnits().add(soot.jimple.Jimple.v().newNopStmt());
    if (branchStmt.kind() == polyglot.ast.Branch.BREAK){
        if (branchStmt.label() == null) {
            // peek (rather than pop-then-push) the innermost break target
            soot.jimple.Stmt gotoEndNoop = (soot.jimple.Stmt)endControlNoop.peek();
            soot.jimple.Stmt gotoEnd = soot.jimple.Jimple.v().newGotoStmt(gotoEndNoop);
            body.getUnits().add(gotoEnd);
            Util.addLnPosTags(gotoEnd, branchStmt.position());
        }
        else {
            soot.jimple.Stmt gotoLabel = soot.jimple.Jimple.v().newGotoStmt((soot.jimple.Stmt)labelBreakMap.get(branchStmt.label()));
            body.getUnits().add(gotoLabel);
            Util.addLnPosTags(gotoLabel, branchStmt.position());
        }
    }
    else if (branchStmt.kind() == polyglot.ast.Branch.CONTINUE){
        if (branchStmt.label() == null) {
            // peek the innermost continue target
            soot.jimple.Stmt gotoCondNoop = (soot.jimple.Stmt)condControlNoop.peek();
            soot.jimple.Stmt gotoCond = soot.jimple.Jimple.v().newGotoStmt(gotoCondNoop);
            body.getUnits().add(gotoCond);
            Util.addLnPosTags(gotoCond, branchStmt.position());
        }
        else {
            soot.jimple.Stmt gotoLabel = soot.jimple.Jimple.v().newGotoStmt((soot.jimple.Stmt)labelContinueMap.get(branchStmt.label()));
            body.getUnits().add(gotoLabel);
            Util.addLnPosTags(gotoLabel, branchStmt.position());
        }
    }
}
/**
 * Labeled Stmt Creation
 *
 * Registers two nop targets for this label - one placed before the
 * labelled stmt (continue target) and one placed after it (break
 * target) - so labelled break/continue can be resolved through
 * labelBreakMap/labelContinueMap, then creates the stmt itself.
 */
private void createLabeled(polyglot.ast.Labeled labeledStmt){
    // lazily create the label lookup maps
    if (labelBreakMap == null) {
        labelBreakMap = new HashMap();
    }
    if (labelContinueMap == null) {
        labelContinueMap = new HashMap();
    }
    String label = labeledStmt.label();
    // continue target: the first jimple unit of the labelled stmt
    soot.jimple.Stmt continueTarget = soot.jimple.Jimple.v().newNopStmt();
    body.getUnits().add(continueTarget);
    labelContinueMap.put(label, continueTarget);
    // break target: placed after all of the labelled stmt's units
    soot.jimple.Stmt breakTarget = soot.jimple.Jimple.v().newNopStmt();
    labelBreakMap.put(label, breakTarget);
    createStmt(labeledStmt.statement());
    body.getUnits().add(breakTarget);
}
/**
 * Assert Stmt Creation
 *
 * Emits:
 *   if ($assertionsDisabled != 0) goto end;
 *   if (cond) goto end;               // skip the failure code when cond holds
 *   throw new AssertionError([msg]);
 * end:
 * The AssertionError constructor overload is selected from the static type
 * of the optional message expression (short/byte promote to the int ctor,
 * any reference type uses the Object ctor).
 */
private void createAssert(polyglot.ast.Assert assertStmt) {
    // check if assertions are disabled
    soot.Local testLocal = lg.generateLocal(soot.BooleanType.v());
    soot.SootField assertField = body.getMethod().getDeclaringClass().getField("$assertionsDisabled", soot.BooleanType.v());
    soot.jimple.FieldRef assertFieldRef = soot.jimple.Jimple.v().newStaticFieldRef(assertField);
    soot.jimple.AssignStmt fieldAssign = soot.jimple.Jimple.v().newAssignStmt(testLocal, assertFieldRef);
    body.getUnits().add(fieldAssign);
    // nop1 marks the end of the whole assert construct
    soot.jimple.NopStmt nop1 = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.ConditionExpr cond1 = soot.jimple.Jimple.v().newNeExpr(testLocal, soot.jimple.IntConstant.v(0));
    soot.jimple.IfStmt testIf = soot.jimple.Jimple.v().newIfStmt(cond1, nop1);
    body.getUnits().add(testIf);
    // actual cond test
    soot.Value sootCond = createExpr(assertStmt.cond());
    boolean needIf = needSootIf(sootCond);
    if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
        // bug fix: must jump past the failure code when the condition is
        // TRUE (== 1); comparing against 0 would throw exactly when the
        // assertion holds
        sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(1));
    }
    else {
        sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
    }
    if (needIf){
        // add if: condition true -> skip the throw
        soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, nop1);
        body.getUnits().add(ifStmt);
        Util.addLnPosTags(ifStmt.getConditionBox(), assertStmt.cond().position());
        Util.addLnPosTags(ifStmt, assertStmt.position());
    }
    // assertion failure code
    soot.Local failureLocal = lg.generateLocal(soot.RefType.v("java.lang.AssertionError"));
    soot.jimple.NewExpr newExpr = soot.jimple.Jimple.v().newNewExpr(soot.RefType.v("java.lang.AssertionError"));
    soot.jimple.AssignStmt newAssign = soot.jimple.Jimple.v().newAssignStmt(failureLocal, newExpr);
    body.getUnits().add(newAssign);
    soot.SootMethod methToInvoke;
    ArrayList paramTypes = new ArrayList();
    ArrayList params = new ArrayList();
    if (assertStmt.errorMessage() != null){
        // pick the AssertionError ctor matching the message's type
        soot.Value errorExpr = createExpr(assertStmt.errorMessage());
        soot.Type errorType = errorExpr.getType();
        if (errorType instanceof soot.IntType) {
            paramTypes.add(soot.IntType.v());
        }
        else if (errorType instanceof soot.LongType){
            paramTypes.add(soot.LongType.v());
        }
        else if (errorType instanceof soot.FloatType){
            paramTypes.add(soot.FloatType.v());
        }
        else if (errorType instanceof soot.DoubleType){
            paramTypes.add(soot.DoubleType.v());
        }
        else if (errorType instanceof soot.CharType){
            paramTypes.add(soot.CharType.v());
        }
        else if (errorType instanceof soot.BooleanType){
            paramTypes.add(soot.BooleanType.v());
        }
        else if (errorType instanceof soot.ShortType){
            // no AssertionError(short) ctor: promote to int
            paramTypes.add(soot.IntType.v());
        }
        else if (errorType instanceof soot.ByteType){
            // no AssertionError(byte) ctor: promote to int
            paramTypes.add(soot.IntType.v());
        }
        else {
            paramTypes.add(soot.Scene.v().getSootClass("java.lang.Object").getType());
        }
        params.add(errorExpr);
    }
    methToInvoke = soot.Scene.v().getSootClass("java.lang.AssertionError").getMethod("<init>", paramTypes, soot.VoidType.v());
    soot.jimple.SpecialInvokeExpr invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(failureLocal, methToInvoke, params);
    soot.jimple.InvokeStmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(invokeExpr);
    body.getUnits().add(invokeStmt);
    soot.jimple.ThrowStmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(failureLocal);
    body.getUnits().add(throwStmt);
    // end
    body.getUnits().add(nop1);
}
/**
 * Synchronized Stmt Creation
 *
 * Emits entermonitor, the body, exitmonitor, then an exception handler
 * that re-exits the monitor and rethrows, so the lock is released on any
 * exceptional path. The monitor operand is pushed on monitorStack so
 * returns inside the body can emit matching exitmonitors (see createReturn).
 */
private void createSynchronized(polyglot.ast.Synchronized synchStmt) {
soot.Value sootExpr = createExpr(synchStmt.expr());
soot.jimple.EnterMonitorStmt enterMon = soot.jimple.Jimple.v().newEnterMonitorStmt(sootExpr);
body.getUnits().add(enterMon);
// lazily create the stack of currently-held monitors
if (monitorStack == null){
monitorStack = new Stack();
}
monitorStack.push(sootExpr);
Util.addLnPosTags(enterMon.getOpBox(), synchStmt.expr().position());
Util.addLnPosTags(enterMon, synchStmt.expr().position());
// protected region starts after the entermonitor
soot.jimple.Stmt startNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(startNoop);
createBlock(synchStmt.body());
// normal exit path: release the monitor
soot.jimple.ExitMonitorStmt exitMon = soot.jimple.Jimple.v().newExitMonitorStmt(sootExpr);
body.getUnits().add(exitMon);
monitorStack.pop();
Util.addLnPosTags(exitMon.getOpBox(), synchStmt.expr().position());
Util.addLnPosTags(exitMon, synchStmt.expr().position());
// jump over the handler code to the end of the construct
soot.jimple.Stmt endSynchNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt gotoEnd = soot.jimple.Jimple.v().newGotoStmt(endSynchNoop);
// endNoop closes the protected region (startNoop..endNoop)
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(endNoop);
body.getUnits().add(gotoEnd);
soot.jimple.Stmt catchAllBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchAllBeforeNoop);
// catch all: bind the caught Throwable
soot.Local formalLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
soot.jimple.CaughtExceptionRef exceptRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(formalLocal, exceptRef);
body.getUnits().add(stmt);
// catch: copy the exception, release the monitor, then rethrow
soot.jimple.Stmt catchBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchBeforeNoop);
soot.Local local = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(local, formalLocal);
body.getUnits().add(assign);
soot.jimple.ExitMonitorStmt catchExitMon = soot.jimple.Jimple.v().newExitMonitorStmt(sootExpr);
body.getUnits().add(catchExitMon);
Util.addLnPosTags(catchExitMon.getOpBox(), synchStmt.expr().position());
soot.jimple.Stmt catchAfterNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchAfterNoop);
// throw
soot.jimple.Stmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(local);
body.getUnits().add(throwStmt);
body.getUnits().add(endSynchNoop);
// trap 1: body region -> handler; trap 2: the handler's own exitmonitor
// region -> handler again, so an exception while unlocking still unlocks
addToExceptionList(startNoop, endNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
addToExceptionList(catchBeforeNoop, catchAfterNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
}
/**
 * Return Stmts Creation
 *
 * Before emitting the return itself this: (1) evaluates the return expr,
 * (2) emits exitmonitor for every monitor currently held (synchronized
 * blocks), and (3) inlines the finally block of the innermost enclosing
 * try (whether we are in its try part or its catch part). If that finally
 * block itself contains a return, no further return is emitted here.
 */
private void createReturn(polyglot.ast.Return retStmt) {
    polyglot.ast.Expr expr = retStmt.expr();
    soot.Value sootLocal = null;
    if (expr != null){
        sootLocal = createExpr(expr);
    }
    // handle monitor exits before return if necessary
    if (monitorStack != null){
        // pop to emit an exitmonitor per held monitor, then restore the stack
        Stack putBack = new Stack();
        while (!monitorStack.isEmpty()){
            soot.Local exitVal = (soot.Local)monitorStack.pop();
            putBack.push(exitVal);
            soot.jimple.ExitMonitorStmt emStmt = soot.jimple.Jimple.v().newExitMonitorStmt(exitVal);
            body.getUnits().add(emStmt);
        }
        while(!putBack.isEmpty()){
            monitorStack.push(putBack.pop());
        }
    }
    // handle finally blocks before return if inside a try or catch block;
    // a finally that itself returns supersedes this return stmt
    if (inlineFinallyBeforeReturn(tryStack) || inlineFinallyBeforeReturn(catchStack)) {
        return;
    }
    // return
    if (expr == null) {
        soot.jimple.Stmt retStmtVoid = soot.jimple.Jimple.v().newReturnVoidStmt();
        body.getUnits().add(retStmtVoid);
        Util.addLnPosTags(retStmtVoid, retStmt.position());
    }
    else {
        if (sootLocal instanceof soot.jimple.ConditionExpr) {
            // a bare condition cannot be returned directly; materialize as 0/1
            sootLocal = handleCondBinExpr((soot.jimple.ConditionExpr)sootLocal);
        }
        soot.jimple.ReturnStmt retStmtLocal = soot.jimple.Jimple.v().newReturnStmt(sootLocal);
        body.getUnits().add(retStmtLocal);
        Util.addLnPosTags(retStmtLocal.getOpBox(), expr.position());
        Util.addLnPosTags(retStmtLocal, retStmt.position());
    }
}

/**
 * Inlines the finally block of the innermost try on the given stack (if the
 * stack is non-empty and that try has a finally block) ahead of a return.
 * Returns true when the finally block itself contains a return stmt, in
 * which case the caller must not emit its own return. The stack is left
 * unchanged.
 */
private boolean inlineFinallyBeforeReturn(Stack stack) {
    if (stack == null || stack.isEmpty()) {
        return false;
    }
    polyglot.ast.Try currentTry = (polyglot.ast.Try)stack.pop();
    if (currentTry.finallyBlock() != null){
        createBlock(currentTry.finallyBlock());
        stack.push(currentTry);
        // if the finally block contains a return don't create the other return
        ReturnStmtChecker rsc = new ReturnStmtChecker();
        currentTry.finallyBlock().visit(rsc);
        return rsc.hasRet();
    }
    stack.push(currentTry);
    return false;
}
/**
 * Throw Stmt Creation
 *
 * Evaluates the thrown expression and emits a jimple throw stmt,
 * tagging both the stmt and its operand box with source positions.
 */
private void createThrow(polyglot.ast.Throw throwStmt){
    soot.Value thrownValue = createExpr(throwStmt.expr());
    soot.jimple.ThrowStmt sootThrow = soot.jimple.Jimple.v().newThrowStmt(thrownValue);
    body.getUnits().add(sootThrow);
    Util.addLnPosTags(sootThrow, throwStmt.position());
    Util.addLnPosTags(sootThrow.getOpBox(), throwStmt.expr().position());
}
/**
 * Try Stmt Creation
 *
 * Dispatches on whether a finally block is present: the two shapes are
 * generated by separate methods for simplicity.
 */
private void createTry(polyglot.ast.Try tryStmt) {
    if (tryStmt.finallyBlock() != null) {
        createTryCatchFinally(tryStmt);
    }
    else {
        createTryCatch(tryStmt);
    }
}
/**
 * handles try/catch (try/catch/finally is separate for simplicity)
 *
 * Layout: [noop1] try-body [noop2] goto end, then per catch block:
 * [noop3] caught-exception binding + catch-body + goto end. One trap
 * (noop1..noop2 -> noop3) is registered per catch block. tryStack and
 * catchStack record the enclosing try so createReturn can inline
 * finally blocks (none here, but the stacks are maintained uniformly).
 */
private void createTryCatch(polyglot.ast.Try tryStmt){
// try
polyglot.ast.Block tryBlock = tryStmt.tryBlock();
// this nop is for the fromStmt of try
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop1);
if (tryStack == null){
tryStack = new Stack();
}
tryStack.push(tryStmt);
createBlock(tryBlock);
tryStack.pop();
// this nop is for the toStmt of try
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop2);
// create end nop for after entire try/catch
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt tryEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(tryEndGoto);
Iterator it = tryStmt.catchBlocks().iterator();
while (it.hasNext()) {
// noop3 is the handler entry point for this catch block
soot.jimple.Stmt noop3 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop3);
// create catch stmts
polyglot.ast.Catch catchBlock = (polyglot.ast.Catch)it.next();
// create catch ref
createCatchFormal(catchBlock.formal());
if (catchStack == null){
catchStack = new Stack();
}
catchStack.push(tryStmt);
createBlock(catchBlock.body());
catchStack.pop();
soot.jimple.Stmt catchEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(catchEndGoto);
// register the trap covering the try body for this catch type
soot.Type sootType = Util.getSootType(catchBlock.catchType());
addToExceptionList(noop1, noop2, noop3, soot.Scene.v().getSootClass(sootType.toString()));
}
body.getUnits().add(endNoop);
}
/**
 * handles try/catch/finally (try/catch is separate for simplicity)
 *
 * The finally block is duplicated once per path that must run it (the try
 * exit, each catch exit, and the catch-all path). gotoMap records, for
 * each duplicated finally, a pair (entry noop -> resume noop): the copies
 * are emitted at the end and each one jumps back to its resume point.
 * A catch-all handler re-raises any exception after running the finally.
 */
private void createTryCatchFinally(polyglot.ast.Try tryStmt){
// maps a finally-copy entry noop to the stmt to resume at afterwards
HashMap gotoMap = new HashMap();
// try
polyglot.ast.Block tryBlock = tryStmt.tryBlock();
// this nop is for the fromStmt of try
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop1);
if (tryStack == null){
tryStack = new Stack();
}
tryStack.push(tryStmt);
createBlock(tryBlock);
tryStack.pop();
// this nop is for the toStmt of try
soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop2);
// create end nop for after entire try/catch
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
// to finally: normal try exit runs the finally copy at tryFinallyNoop
soot.jimple.Stmt tryGotoFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(tryGotoFinallyNoop);
soot.jimple.Stmt tryFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt tryGotoFinally = soot.jimple.Jimple.v().newGotoStmt(tryFinallyNoop);
body.getUnits().add(tryGotoFinally);
// goto end stmts: resume point after the try's finally copy
soot.jimple.Stmt beforeEndGotoNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(beforeEndGotoNoop);
soot.jimple.Stmt tryEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(tryEndGoto);
gotoMap.put(tryFinallyNoop, beforeEndGotoNoop);
// catch section
soot.jimple.Stmt catchAllBeforeNoop = soot.jimple.Jimple.v().newNopStmt();
Iterator it = tryStmt.catchBlocks().iterator();
while (it.hasNext()) {
// noop3 is the handler entry point for this catch block
soot.jimple.Stmt noop3 = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(noop3);
// create catch stmts
polyglot.ast.Catch catchBlock = (polyglot.ast.Catch)it.next();
// create catch ref
soot.jimple.Stmt catchRefNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchRefNoop);
createCatchFormal(catchBlock.formal());
soot.jimple.Stmt catchStmtsNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchStmtsNoop);
if (catchStack == null){
catchStack = new Stack();
}
catchStack.push(tryStmt);
createBlock(catchBlock.body());
catchStack.pop();
// to finally: normal catch exit runs its own finally copy
soot.jimple.Stmt catchGotoFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchGotoFinallyNoop);
soot.jimple.Stmt catchFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt catchGotoFinally = soot.jimple.Jimple.v().newGotoStmt(catchFinallyNoop);
body.getUnits().add(catchGotoFinally);
// goto end stmts: resume point after this catch's finally copy
soot.jimple.Stmt beforeCatchEndGotoNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(beforeCatchEndGotoNoop);
soot.jimple.Stmt catchEndGoto = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(catchEndGoto);
gotoMap.put(catchFinallyNoop, beforeCatchEndGotoNoop);
// trap 1: try body -> this catch; trap 2: this catch's body -> catch-all,
// so an exception thrown inside the catch still runs the finally
soot.Type sootType = Util.getSootType(catchBlock.catchType());
addToExceptionList(noop1, noop2, noop3, soot.Scene.v().getSootClass(sootType.toString()));
addToExceptionList(catchStmtsNoop, beforeCatchEndGotoNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
}
// catch all ref: binds any otherwise-uncaught Throwable
soot.Local formalLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
body.getUnits().add(catchAllBeforeNoop);
soot.jimple.CaughtExceptionRef exceptRef = soot.jimple.Jimple.v().newCaughtExceptionRef();
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newIdentityStmt(formalLocal, exceptRef);
body.getUnits().add(stmt);
// catch all assign: copy to a local so it survives the finally copy
soot.jimple.Stmt beforeCatchAllAssignNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(beforeCatchAllAssignNoop);
soot.Local catchAllAssignLocal = lg.generateLocal(soot.RefType.v("java.lang.Throwable"));
soot.jimple.Stmt catchAllAssign = soot.jimple.Jimple.v().newAssignStmt(catchAllAssignLocal, formalLocal);
body.getUnits().add(catchAllAssign);
// catch all finally: run the finally copy, then rethrow
soot.jimple.Stmt catchAllFinallyNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt catchAllGotoFinally = soot.jimple.Jimple.v().newGotoStmt(catchAllFinallyNoop);
body.getUnits().add(catchAllGotoFinally);
// catch all throw
soot.jimple.Stmt catchAllBeforeThrowNoop = soot.jimple.Jimple.v().newNopStmt();
body.getUnits().add(catchAllBeforeThrowNoop);
soot.jimple.Stmt throwStmt = soot.jimple.Jimple.v().newThrowStmt(catchAllAssignLocal);
// mark as synthetic so later phases know this throw was generated
throwStmt.addTag(new soot.tagkit.ThrowCreatedByCompilerTag());
body.getUnits().add(throwStmt);
gotoMap.put(catchAllFinallyNoop, catchAllBeforeThrowNoop);
// catch all goto end
soot.jimple.Stmt catchAllGotoEnd = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(catchAllGotoEnd);
// protect the catch-all's own assign against further Throwables
addToExceptionList(beforeCatchAllAssignNoop, catchAllBeforeThrowNoop ,catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
// create finally's: one duplicated copy per recorded entry noop, each
// followed by a goto back to its resume point
Iterator finallyIt = gotoMap.keySet().iterator();
while (finallyIt.hasNext()) {
soot.jimple.Stmt noopStmt = (soot.jimple.Stmt)finallyIt.next();
body.getUnits().add(noopStmt);
createBlock(tryStmt.finallyBlock());
soot.jimple.Stmt backToStmt = (soot.jimple.Stmt)gotoMap.get(noopStmt);
soot.jimple.Stmt backToGoto = soot.jimple.Jimple.v().newGotoStmt(backToStmt);
body.getUnits().add(backToGoto);
}
body.getUnits().add(endNoop);
// catch-all trap over the whole try region (including its finally dispatch)
addToExceptionList(noop1, beforeEndGotoNoop, catchAllBeforeNoop, soot.Scene.v().getSootClass("java.lang.Throwable"));
}
/**
 * add exceptions to a list that gets added at end of method
 *
 * Records a trap covering [from, to) with handler 'with' for the given
 * exception class; the exceptionTable is created lazily.
 */
private void addToExceptionList(soot.jimple.Stmt from, soot.jimple.Stmt to, soot.jimple.Stmt with, soot.SootClass exceptionClass) {
    if (exceptionTable == null) {
        exceptionTable = new ArrayList();
    }
    exceptionTable.add(soot.jimple.Jimple.v().newTrap(exceptionClass, from, to, with));
}
/**
 * Expression Creation
 *
 * Central dispatch turning a polyglot expression AST node into a soot
 * Value, delegating to the per-kind creators. The instanceof order is
 * significant (e.g. Assign/Lit/Local are tested before more general
 * node kinds). polyglot.ast.ArrayInit is intentionally absent: array
 * initializers are created where they occur (decls/assigns).
 *
 * @throws RuntimeException for any expression kind not handled here
 */
private soot.Value createExpr(polyglot.ast.Expr expr){
    if (expr instanceof polyglot.ast.Assign) {
        return getAssignLocal((polyglot.ast.Assign)expr);
    }
    else if (expr instanceof polyglot.ast.Lit) {
        return createLiteral((polyglot.ast.Lit)expr);
    }
    else if (expr instanceof polyglot.ast.Local) {
        return getLocal((polyglot.ast.Local)expr);
    }
    else if (expr instanceof polyglot.ast.Binary) {
        return getBinaryLocal((polyglot.ast.Binary)expr);
    }
    else if (expr instanceof polyglot.ast.Unary) {
        return getUnaryLocal((polyglot.ast.Unary)expr);
    }
    else if (expr instanceof polyglot.ast.Cast) {
        return getCastLocal((polyglot.ast.Cast)expr);
    }
    else if (expr instanceof polyglot.ast.ArrayAccess) {
        return getArrayRefLocal((polyglot.ast.ArrayAccess)expr);
    }
    else if (expr instanceof polyglot.ast.NewArray) {
        return getNewArrayLocal((polyglot.ast.NewArray)expr);
    }
    else if (expr instanceof polyglot.ast.Call) {
        return getCallLocal((polyglot.ast.Call)expr);
    }
    else if (expr instanceof polyglot.ast.New) {
        return getNewLocal((polyglot.ast.New)expr);
    }
    else if (expr instanceof polyglot.ast.Special) {
        return getSpecialLocal((polyglot.ast.Special)expr);
    }
    else if (expr instanceof polyglot.ast.Instanceof) {
        return getInstanceOfLocal((polyglot.ast.Instanceof)expr);
    }
    else if (expr instanceof polyglot.ast.Conditional) {
        return getConditionalLocal((polyglot.ast.Conditional)expr);
    }
    else if (expr instanceof polyglot.ast.Field) {
        return getFieldLocal((polyglot.ast.Field)expr);
    }
    else {
        // include the offending node in the exception instead of printing
        // it to stdout and throwing an uninformative message
        throw new RuntimeException("Unhandled Expression: " + expr);
    }
}
/**
 * Assigns to a private field of another class by routing the store
 * through a generated static access$NN00 method on the field's
 * declaring class (see addSetAccessMeth). Returns the local holding
 * the assigned value.
 */
private soot.Local handlePrivateFieldSet(polyglot.ast.Assign assign){
    polyglot.ast.Field fieldLhs = (polyglot.ast.Field)assign.left();
    // compute the value to store; compound ops need the field's current
    // value first, which can never be a string constant here (it is a lhs)
    soot.Value rhsValue;
    if (assign.operator() == polyglot.ast.Assign.ASSIGN){
        rhsValue = getSimpleAssignRightLocal(assign);
    }
    else if ((assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) && assign.type().toString().equals("java.lang.String")){
        rhsValue = getStringConcatAssignRightLocal(assign);
    }
    else {
        soot.Local currentFieldVal = (soot.Local)getFieldLocal(fieldLhs);
        rhsValue = getAssignRightLocal(assign, currentFieldVal);
    }
    soot.SootClass targetClass = ((soot.RefType)Util.getSootType(fieldLhs.target().type())).getSootClass();
    soot.SootMethod accessor = addSetAccessMeth(targetClass, fieldLhs, rhsValue);
    ArrayList args = new ArrayList();
    if (!fieldLhs.flags().isStatic()){
        // instance fields take the receiver as the first argument
        args.add(getThis(Util.getSootType(fieldLhs.target().type())));
    }
    args.add(rhsValue);
    soot.jimple.InvokeExpr accessorCall = soot.jimple.Jimple.v().newStaticInvokeExpr(accessor, args);
    soot.Local result = lg.generateLocal(rhsValue.getType());
    soot.jimple.AssignStmt resultAssign = soot.jimple.Jimple.v().newAssignStmt(result, accessorCall);
    body.getUnits().add(resultAssign);
    return result;
}
/**
 * Creates and attaches to conClass a synthetic static access$NN00 method
 * that performs the actual private-field write; its body is supplied by
 * PrivateFieldSetMethodSource. Returns the new method.
 */
private soot.SootMethod addSetAccessMeth(soot.SootClass conClass, polyglot.ast.Field field, soot.Value param){
    ArrayList paramTypes = new ArrayList();
    if (!field.flags().isStatic()){
        // receiver type comes first for instance fields
        paramTypes.add(Util.getSootType(field.target().type()));
    }
    paramTypes.add(param.getType());
    // globally unique synthetic name
    String name = "access$" + soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter() + "00";
    soot.SootMethod meth = new soot.SootMethod(name, paramTypes, param.getType(), soot.Modifier.STATIC);
    PrivateFieldSetMethodSource pfsms = new PrivateFieldSetMethodSource();
    pfsms.fieldName(field.name());
    pfsms.fieldType(Util.getSootType(field.type()));
    pfsms.setFieldInst(field.fieldInstance());
    conClass.addMethod(meth);
    meth.setActiveBody(pfsms.getBody(meth, null));
    return meth;
}
/**
 * Computes the rhs value for an assignment: plain '=' uses the rhs
 * directly, String '+=' builds a concatenation, and every other compound
 * operator combines leftLocal (the current lhs value) with the rhs.
 */
private soot.Value getAssignRightLocal(polyglot.ast.Assign assign, soot.Local leftLocal){
    if (assign.operator() == polyglot.ast.Assign.ASSIGN){
        return getSimpleAssignRightLocal(assign);
    }
    if (assign.operator() == polyglot.ast.Assign.ADD_ASSIGN && assign.type().toString().equals("java.lang.String")){
        return getStringConcatAssignRightLocal(assign);
    }
    return getComplexAssignRightLocal(assign, leftLocal);
}
/**
 * Evaluates the rhs of a plain '=' assignment; a bare condition expr is
 * materialized into a 0/1 local since it cannot be an assign rhs.
 */
private soot.Value getSimpleAssignRightLocal(polyglot.ast.Assign assign){
    soot.Value rhs = createExpr(assign.right());
    return (rhs instanceof soot.jimple.ConditionExpr)
        ? handleCondBinExpr((soot.jimple.ConditionExpr)rhs)
        : rhs;
}
/**
 * Builds the rhs of a String '+=': appends the lhs then the rhs to a
 * string buffer and converts the result to a String local.
 */
private soot.Local getStringConcatAssignRightLocal(polyglot.ast.Assign assign){
    soot.Local buffer = (soot.Local)createStringBuffer(assign);
    generateAppends(assign.left(), buffer);
    generateAppends(assign.right(), buffer);
    return createToString(buffer, assign);
}
/**
 * Computes the rhs of a compound assignment (+=, -=, *=, /=, %=, <<=,
 * >>=, >>>=, &=, |=, ^=): evaluates the rhs, combines it with the
 * current lhs value via the matching binop, assigns the result into a
 * fresh local and returns that local.
 *
 * @throws RuntimeException if the operator is not a known compound op
 *         (previously this fell through to an uninformative NPE when
 *         binop remained null)
 */
private soot.Local getComplexAssignRightLocal(polyglot.ast.Assign assign, soot.Local leftLocal){
    soot.Value right = createExpr(assign.right());
    if (right instanceof soot.jimple.ConditionExpr) {
        // a bare condition cannot be a binop operand; materialize as 0/1
        right = handleCondBinExpr((soot.jimple.ConditionExpr)right);
    }
    soot.jimple.BinopExpr binop;
    if (assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) {
        binop = soot.jimple.Jimple.v().newAddExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.SUB_ASSIGN){
        binop = soot.jimple.Jimple.v().newSubExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.MUL_ASSIGN) {
        binop = soot.jimple.Jimple.v().newMulExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.DIV_ASSIGN) {
        binop = soot.jimple.Jimple.v().newDivExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.MOD_ASSIGN) {
        binop = soot.jimple.Jimple.v().newRemExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.SHL_ASSIGN) {
        binop = soot.jimple.Jimple.v().newShlExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.SHR_ASSIGN) {
        binop = soot.jimple.Jimple.v().newShrExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.USHR_ASSIGN) {
        binop = soot.jimple.Jimple.v().newUshrExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.BIT_AND_ASSIGN) {
        binop = soot.jimple.Jimple.v().newAndExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.BIT_OR_ASSIGN) {
        binop = soot.jimple.Jimple.v().newOrExpr(leftLocal, right);
    }
    else if (assign.operator() == polyglot.ast.Assign.BIT_XOR_ASSIGN) {
        binop = soot.jimple.Jimple.v().newXorExpr(leftLocal, right);
    }
    else {
        // fail loudly instead of letting a null binop NPE below
        throw new RuntimeException("Unhandled assign operator: " + assign.operator());
    }
    soot.Local retLocal = lg.generateLocal(leftLocal.getType());
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, binop);
    body.getUnits().add(assignStmt);
    Util.addLnPosTags(binop.getOp1Box(), assign.left().position());
    Util.addLnPosTags(binop.getOp2Box(), assign.right().position());
    return retLocal;
}
/**
 * Handles a plain "=" assignment: evaluates both sides, emits the
 * assignment statement, and returns the assigned value (the left side when
 * it is a local, otherwise the right side).
 */
private soot.Value getSimpleAssignLocal(polyglot.ast.Assign assign){
    soot.Value lhs = createLHS(assign.left());
    soot.Value rhs = getSimpleAssignRightLocal(assign);
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
    body.getUnits().add(assignStmt);
    // tag the statement and both operand boxes with source positions
    Util.addLnPosTags(assignStmt, assign.position());
    Util.addLnPosTags(assignStmt.getRightOpBox(), assign.right().position());
    Util.addLnPosTags(assignStmt.getLeftOpBox(), assign.left().position());
    return (lhs instanceof soot.Local) ? lhs : rhs;
}
/**
 * Handles "+=" on strings: evaluates the left side, builds the
 * concatenation result, emits the assignment and returns the assigned
 * value (the left side when it is a local, otherwise the right side).
 */
private soot.Value getStrConAssignLocal(polyglot.ast.Assign assign){
    soot.Value lhs = createLHS(assign.left());
    soot.Value rhs = getStringConcatAssignRightLocal(assign);
    soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(lhs, rhs);
    body.getUnits().add(assignStmt);
    // tag the statement and both operand boxes with source positions
    Util.addLnPosTags(assignStmt, assign.position());
    Util.addLnPosTags(assignStmt.getRightOpBox(), assign.right().position());
    Util.addLnPosTags(assignStmt.getLeftOpBox(), assign.left().position());
    return (lhs instanceof soot.Local) ? lhs : rhs;
}
/**
 * Assign Expression Creation
 *
 * Dispatches on the kind of assignment: accessor-method sets for private
 * fields of outer classes, plain "=", string "+=", and the remaining
 * compound operators, which are expanded into read / binop / write-back
 * sequences.
 */
private soot.Value getAssignLocal(polyglot.ast.Assign assign) {
    // handle private access field assigns (an inner class writing a private
    // outer-class field goes through a generated accessor method)
    HashMap accessMap = ((PolyglotMethodSource)body.getMethod().getSource()).getPrivateAccessMap();
    if ((assign.left() instanceof polyglot.ast.Field) && (accessMap != null) && accessMap.containsKey(((polyglot.ast.Field)assign.left()).fieldInstance())){
        return handlePrivateFieldSet(assign);
    }
    if (assign.operator() == polyglot.ast.Assign.ASSIGN){
        return getSimpleAssignLocal(assign);
    }
    // "+=" on strings means concatenation, not addition
    if ((assign.operator() == polyglot.ast.Assign.ADD_ASSIGN) && assign.type().toString().equals("java.lang.String")){
        return getStrConAssignLocal(assign);
    }
    soot.jimple.AssignStmt stmt;
    soot.Value left = createLHS(assign.left());
    soot.Local leftLocal;
    if (left instanceof soot.Local){
        leftLocal = (soot.Local)left;
    }
    else {
        // non-local lhs (field/array ref): read its current value into a local
        leftLocal = lg.generateLocal(left.getType());
        stmt = soot.jimple.Jimple.v().newAssignStmt(leftLocal, left);
        body.getUnits().add(stmt);
        Util.addLnPosTags(stmt, assign.position());
    }
    soot.Value right = getAssignRightLocal(assign, leftLocal);
    stmt = soot.jimple.Jimple.v().newAssignStmt(leftLocal, right);
    body.getUnits().add(stmt);
    Util.addLnPosTags(stmt, assign.position());
    Util.addLnPosTags(stmt.getRightOpBox(), assign.right().position());
    Util.addLnPosTags(stmt.getLeftOpBox(), assign.left().position());
    if (!(left instanceof soot.Local)) {
        // write the computed value back to the original field/array ref
        stmt = soot.jimple.Jimple.v().newAssignStmt(left, leftLocal);
        body.getUnits().add(stmt);
        Util.addLnPosTags(stmt, assign.position());
        Util.addLnPosTags(stmt.getRightOpBox(), assign.right().position());
        Util.addLnPosTags(stmt.getLeftOpBox(), assign.left().position());
    }
    return leftLocal;
}
/**
 * Field Expression Creation - LHS
 *
 * "length" on an array receiver is a special length expression; every
 * other field becomes a field ref.
 */
private soot.Value getFieldLocalLeft(polyglot.ast.Field field){
    polyglot.ast.Receiver target = field.target();
    boolean isArrayLength = field.name().equals("length") && (target.type() instanceof polyglot.types.ArrayType);
    return isArrayLength ? getSpecialArrayLengthLocal(field) : getFieldRef(field);
}
/**
 * Field Expression Creation
 *
 * Special cases, in order: array "length", the "class" pseudo-field
 * (handled elsewhere via ClassLit), private outer-class fields (accessor
 * invoke), qualified super field access, and constant String fields
 * (folded to a constant). Otherwise the field is read into a fresh local.
 */
private soot.Value getFieldLocal(polyglot.ast.Field field){
    polyglot.ast.Receiver target = field.target();
    soot.javaToJimple.PolyglotMethodSource methodSource = (soot.javaToJimple.PolyglotMethodSource)body.getMethod().getSource();
    if (field.name().equals("length") && (target.type() instanceof polyglot.types.ArrayType)){
        return getSpecialArrayLengthLocal(field);
    }
    if (field.name().equals("class")){
        throw new RuntimeException("Should go through ClassLit");
    }
    if ((methodSource.getPrivateAccessMap() != null) && methodSource.getPrivateAccessMap().containsKey(field.fieldInstance())){
        return getPrivateAccessFieldLocal(field);
    }
    if (field.target() instanceof polyglot.ast.Special){
        polyglot.ast.Special special = (polyglot.ast.Special)field.target();
        if ((special.kind() == polyglot.ast.Special.SUPER) && (special.qualifier() != null)){
            return getSpecialSuperQualifierLocal(field);
        }
    }
    if (shouldReturnConstant(field)){
        // constant String fields fold to a string constant, not a field read
        return getReturnConstant(field);
    }
    soot.jimple.FieldRef fieldRef = getFieldRef(field);
    soot.Local fieldLocal = generateLocal(field.type());
    soot.jimple.AssignStmt loadStmt = soot.jimple.Jimple.v().newAssignStmt(fieldLocal, fieldRef);
    body.getUnits().add(loadStmt);
    Util.addLnPosTags(loadStmt, field.position());
    return fieldLocal;
}
/**
 * Folds a constant String field into a Jimple string constant.
 */
private soot.jimple.Constant getReturnConstant(polyglot.ast.Field field){
    // shouldReturnConstant guarantees the constant value is a String
    String constantValue = (String)field.constantValue();
    return soot.jimple.StringConstant.v(constantValue);
}
/**
 * Returns true when the field is a compile-time String constant, in which
 * case a StringConstant is emitted instead of a field read.
 */
private boolean shouldReturnConstant(polyglot.ast.Field field){
    return field.fieldInstance().isConstant()
            && (field.fieldInstance().constantValue() instanceof String);
}
/**
 * creates a field ref
 *
 * Static fields become a StaticFieldRef; instance fields become an
 * InstanceFieldRef whose base local is derived from the receiver.
 */
private soot.jimple.FieldRef getFieldRef(polyglot.ast.Field field) {
    soot.SootClass containerClass = ((soot.RefType)Util.getSootType(field.fieldInstance().container())).getSootClass();
    soot.SootField sootField = containerClass.getField(field.name(), Util.getSootType(field.type()));
    soot.jimple.FieldRef ref;
    if (field.fieldInstance().flags().isStatic()) {
        ref = soot.jimple.Jimple.v().newStaticFieldRef(sootField);
    }
    else {
        soot.Local baseLocal = (soot.Local)getBaseLocal(field.target());
        ref = soot.jimple.Jimple.v().newInstanceFieldRef(baseLocal, sootField);
    }
    // tag the receiver box with its source position when it is a simple local
    if ((field.target() instanceof polyglot.ast.Local) && (ref instanceof soot.jimple.InstanceFieldRef)){
        Util.addLnPosTags(((soot.jimple.InstanceFieldRef)ref).getBaseBox(), field.target().position());
    }
    return ref;
}
/**
 * For Inner Classes - to access private fields of their outer class
 *
 * Looks up the generated accessor method for the field and invokes it;
 * instance accessors take the receiver as their single argument.
 */
private soot.Local getPrivateAccessFieldLocal(polyglot.ast.Field field) {
    HashMap accessorMap = ((soot.javaToJimple.PolyglotMethodSource)body.getMethod().getSource()).getPrivateAccessMap();
    soot.SootMethod accessor = (soot.SootMethod)accessorMap.get(field.fieldInstance());
    ArrayList args = new ArrayList();
    if (!field.fieldInstance().flags().isStatic()) {
        args.add((soot.Local)getBaseLocal(field.target()));
    }
    return Util.getPrivateAccessFieldInvoke(accessor, args, body, lg);
}
/**
 * To get the local for the special .class literal
 *
 * For a primitive type the literal is read from the wrapper class' TYPE
 * field. For a reference type the cached "class$..." field is used and is
 * lazily initialized through the generated "class$" helper method; when the
 * current class is an interface, field and helper live on its companion
 * anon class instead.
 */
private soot.Local getSpecialClassLitLocal(polyglot.ast.ClassLit lit) {
    if (lit.typeNode().type().isPrimitive()){
        polyglot.types.PrimitiveType primType = (polyglot.types.PrimitiveType)lit.typeNode().type();
        soot.Local retLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
        soot.SootClass wrapperClass = soot.Scene.v().getSootClass(getWrapperClassName(primType));
        soot.SootField primField = wrapperClass.getField("TYPE", soot.RefType.v("java.lang.Class"));
        soot.jimple.StaticFieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(primField);
        soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, fieldRef);
        body.getUnits().add(assignStmt);
        return retLocal;
    }
    else {
        // this class
        soot.SootClass thisClass = body.getMethod().getDeclaringClass();
        soot.SootClass holderClass = getClassLitHolder(thisClass);
        String fieldName = Util.getFieldNameForClassLit(lit.typeNode().type());
        soot.Type fieldType = soot.RefType.v("java.lang.Class");
        soot.Local fieldLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
        soot.SootField sootField = holderClass.getField(fieldName, fieldType);
        soot.jimple.StaticFieldRef fieldRef = soot.jimple.Jimple.v().newStaticFieldRef(sootField);
        soot.jimple.Stmt fieldAssign = soot.jimple.Jimple.v().newAssignStmt(fieldLocal, fieldRef);
        body.getUnits().add(fieldAssign);
        // if the cache field is already non-null, jump straight to the read
        soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
        soot.jimple.Expr neExpr = soot.jimple.Jimple.v().newNeExpr(fieldLocal, soot.jimple.NullConstant.v());
        soot.jimple.Stmt ifStmt = soot.jimple.Jimple.v().newIfStmt(neExpr, noop1);
        body.getUnits().add(ifStmt);
        ArrayList paramTypes = new ArrayList();
        paramTypes.add(soot.RefType.v("java.lang.String"));
        soot.SootMethod invokeMeth = holderClass.getMethod("class$", paramTypes, soot.RefType.v("java.lang.Class"));
        ArrayList params = new ArrayList();
        params.add(soot.jimple.StringConstant.v(Util.getParamNameForClassLit(lit.typeNode().type())));
        soot.jimple.Expr classInvoke = soot.jimple.Jimple.v().newStaticInvokeExpr(invokeMeth, params);
        soot.Local methLocal = lg.generateLocal(soot.RefType.v("java.lang.Class"));
        soot.jimple.Stmt invokeAssign = soot.jimple.Jimple.v().newAssignStmt(methLocal, classInvoke);
        body.getUnits().add(invokeAssign);
        // store the freshly resolved Class back into the cache field
        soot.jimple.Stmt assignField = soot.jimple.Jimple.v().newAssignStmt(fieldRef, methLocal);
        body.getUnits().add(assignField);
        soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
        soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
        body.getUnits().add(goto1);
        // cache hit: read the field value into the result local
        body.getUnits().add(noop1);
        fieldAssign = soot.jimple.Jimple.v().newAssignStmt(methLocal, fieldRef);
        body.getUnits().add(fieldAssign);
        body.getUnits().add(noop2);
        return methLocal;
    }
}
/**
 * Maps a primitive type to the name of the wrapper class carrying its
 * TYPE field.
 */
private String getWrapperClassName(polyglot.types.PrimitiveType primType){
    if (primType.isBoolean()) return "java.lang.Boolean";
    if (primType.isByte()) return "java.lang.Byte";
    if (primType.isChar()) return "java.lang.Character";
    if (primType.isDouble()) return "java.lang.Double";
    if (primType.isFloat()) return "java.lang.Float";
    if (primType.isInt()) return "java.lang.Integer";
    if (primType.isLong()) return "java.lang.Long";
    if (primType.isShort()) return "java.lang.Short";
    if (primType.isVoid()) return "java.lang.Void";
    // was: fell through with a null field and failed later with an NPE
    throw new RuntimeException("Unknown primitive type: "+primType);
}
/**
 * Returns the class that carries the class-literal cache field and the
 * "class$" helper: the class itself, or its companion anon class when the
 * class is an interface.
 */
private soot.SootClass getClassLitHolder(soot.SootClass thisClass){
    if (!thisClass.isInterface()) {
        return thisClass;
    }
    HashMap specialAnonMap = InitialResolver.v().specialAnonMap();
    if ((specialAnonMap != null) && specialAnonMap.containsKey(thisClass)){
        return (soot.SootClass)specialAnonMap.get(thisClass);
    }
    throw new RuntimeException("Class is interface so it must have an anon class to handle class lits but its anon class cannot be found.");
}
/**
 * Array Length local for example a.length w/o brackets gets length
 * of array
 */
private soot.Local getSpecialArrayLengthLocal(polyglot.ast.Field field) {
    polyglot.ast.Receiver receiver = field.target();
    soot.Local arrayLocal;
    if (receiver instanceof polyglot.ast.Local) {
        arrayLocal = getLocal((polyglot.ast.Local)receiver);
    }
    else if (receiver instanceof polyglot.ast.Expr){
        arrayLocal = (soot.Local)createExpr((polyglot.ast.Expr)receiver);
    }
    else {
        arrayLocal = generateLocal(receiver.type());
    }
    // length is read via a LengthExpr into a fresh int local
    soot.jimple.LengthExpr lengthExpr = soot.jimple.Jimple.v().newLengthExpr(arrayLocal);
    soot.Local lengthLocal = lg.generateLocal(soot.IntType.v());
    soot.jimple.Stmt lengthAssign = soot.jimple.Jimple.v().newAssignStmt(lengthLocal, lengthExpr);
    body.getUnits().add(lengthAssign);
    Util.addLnPosTags(lengthAssign, field.position());
    Util.addLnPosTags(lengthExpr.getOpBox(), field.target().position());
    return lengthLocal;
}
/**
 * Binary Expression Creation
 *
 * Short-circuit operators get their own control-flow expansion; string
 * binaries become constant folds or StringBuffer append chains; everything
 * else becomes an arithmetic or comparison Jimple expression.
 */
private soot.Value getBinaryLocal(polyglot.ast.Binary binary) {
    if (binary.operator() == polyglot.ast.Binary.COND_AND) {
        return createCondAnd(binary);
    }
    if (binary.operator() == polyglot.ast.Binary.COND_OR) {
        return createCondOr(binary);
    }
    if (binary.type().toString().equals("java.lang.String")){
        if (areAllStringLits(binary)){
            // entirely constant: fold into a single string constant
            return soot.jimple.StringConstant.v(createStringConstant(binary));
        }
        soot.Local buffer = (soot.Local)createStringBuffer(binary);
        generateAppends(binary.left(), buffer);
        generateAppends(binary.right(), buffer);
        return createToString(buffer, binary);
    }
    soot.Value leftVal = createExpr(binary.left());
    soot.Value rightVal = createExpr(binary.right());
    soot.Value rhs = isComparisonBinary(binary.operator())
            ? getBinaryComparisonExpr(leftVal, rightVal, binary.operator())
            : getBinaryExpr(leftVal, rightVal, binary.operator());
    if (rhs instanceof soot.jimple.BinopExpr) {
        Util.addLnPosTags(((soot.jimple.BinopExpr)rhs).getOp1Box(), binary.left().position());
        Util.addLnPosTags(((soot.jimple.BinopExpr)rhs).getOp2Box(), binary.right().position());
    }
    // condition expressions are handed back as-is for the caller to branch on
    if (rhs instanceof soot.jimple.ConditionExpr) {
        return rhs;
    }
    soot.Local resultLocal = generateLocal(binary.type());
    soot.jimple.AssignStmt resultAssign = soot.jimple.Jimple.v().newAssignStmt(resultLocal, rhs);
    body.getUnits().add(resultAssign);
    Util.addLnPosTags(resultAssign.getRightOpBox(), binary.position());
    return resultLocal;
}
/**
 * Returns true when the node is built entirely from compile-time String
 * constants (string literals, constant String fields, or binaries over
 * such operands) and can therefore be folded.
 */
private boolean areAllStringLits(polyglot.ast.Node node){
    if (node instanceof polyglot.ast.StringLit) {
        return true;
    }
    if (node instanceof polyglot.ast.Field) {
        return shouldReturnConstant((polyglot.ast.Field)node);
    }
    if (node instanceof polyglot.ast.Binary){
        return areAllStringLitsBinary((polyglot.ast.Binary)node);
    }
    return false;
}
/**
 * Returns true when both operands of the binary are String constants.
 */
private boolean areAllStringLitsBinary(polyglot.ast.Binary binary){
    return areAllStringLits(binary.left()) && areAllStringLits(binary.right());
}
/**
 * Folds a constant subtree (string literal, constant String field, or a
 * binary over such operands) into a single Java string.
 */
private String createStringConstant(polyglot.ast.Node node){
    if (node instanceof polyglot.ast.StringLit){
        return ((polyglot.ast.StringLit)node).value();
    }
    if (node instanceof polyglot.ast.Field){
        return (String)((polyglot.ast.Field)node).fieldInstance().constantValue();
    }
    if (node instanceof polyglot.ast.Binary){
        return createStringConstantBinary((polyglot.ast.Binary)node);
    }
    throw new RuntimeException("No other string constant folding done");
}
/**
 * Concatenates the folded constant values of both operands.
 */
private String createStringConstantBinary(polyglot.ast.Binary binary){
    return createStringConstant(binary.left()) + createStringConstant(binary.right());
}
/**
 * Returns true for the six comparison operators, which yield condition
 * expressions rather than arithmetic values.
 */
private boolean isComparisonBinary(polyglot.ast.Binary.Operator op) {
    return (op == polyglot.ast.Binary.EQ) || (op == polyglot.ast.Binary.NE)
        || (op == polyglot.ast.Binary.GE) || (op == polyglot.ast.Binary.GT)
        || (op == polyglot.ast.Binary.LE) || (op == polyglot.ast.Binary.LT);
}
/**
 * Creates a binary expression that is not a comparison
 *
 * Conditions used as values are materialized as boolean locals first;
 * shift amounts of long type are narrowed to int (see castShiftAmount).
 */
private soot.Value getBinaryExpr(soot.Value lVal, soot.Value rVal, polyglot.ast.Binary.Operator operator){
    soot.Value rValue;
    if (lVal instanceof soot.jimple.ConditionExpr) {
        lVal = handleCondBinExpr((soot.jimple.ConditionExpr)lVal);
    }
    if (rVal instanceof soot.jimple.ConditionExpr) {
        rVal = handleCondBinExpr((soot.jimple.ConditionExpr)rVal);
    }
    if (operator == polyglot.ast.Binary.ADD){
        rValue = soot.jimple.Jimple.v().newAddExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.SUB){
        rValue = soot.jimple.Jimple.v().newSubExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.MUL){
        rValue = soot.jimple.Jimple.v().newMulExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.DIV){
        rValue = soot.jimple.Jimple.v().newDivExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.SHR){
        rValue = soot.jimple.Jimple.v().newShrExpr(lVal, castShiftAmount(rVal));
    }
    else if (operator == polyglot.ast.Binary.USHR){
        rValue = soot.jimple.Jimple.v().newUshrExpr(lVal, castShiftAmount(rVal));
    }
    else if (operator == polyglot.ast.Binary.SHL){
        rValue = soot.jimple.Jimple.v().newShlExpr(lVal, castShiftAmount(rVal));
    }
    else if (operator == polyglot.ast.Binary.BIT_AND){
        rValue = soot.jimple.Jimple.v().newAndExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.BIT_OR){
        rValue = soot.jimple.Jimple.v().newOrExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.BIT_XOR){
        rValue = soot.jimple.Jimple.v().newXorExpr(lVal, rVal);
    }
    else if (operator == polyglot.ast.Binary.MOD){
        rValue = soot.jimple.Jimple.v().newRemExpr(lVal, rVal);
    }
    else {
        throw new RuntimeException("Binary not yet handled!");
    }
    return rValue;
}
/**
 * Shift amounts must be int-typed in Jimple; a long amount is narrowed via
 * an explicit cast into a fresh int local. Other values pass through
 * unchanged. (Extracted from the previously triplicated SHR/USHR/SHL code.)
 */
private soot.Value castShiftAmount(soot.Value shiftVal){
    if (shiftVal.getType().equals(soot.LongType.v())){
        soot.Local intVal = lg.generateLocal(soot.IntType.v());
        soot.jimple.CastExpr castExpr = soot.jimple.Jimple.v().newCastExpr(shiftVal, soot.IntType.v());
        soot.jimple.AssignStmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(intVal, castExpr);
        body.getUnits().add(assignStmt);
        return intVal;
    }
    return shiftVal;
}
/**
 * Creates a binary expr that is a comparison
 */
private soot.Value getBinaryComparisonExpr(soot.Value lVal, soot.Value rVal, polyglot.ast.Binary.Operator operator) {
    if (operator == polyglot.ast.Binary.EQ){
        return soot.jimple.Jimple.v().newEqExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.GE){
        return soot.jimple.Jimple.v().newGeExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.GT){
        return soot.jimple.Jimple.v().newGtExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.LE){
        return soot.jimple.Jimple.v().newLeExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.LT){
        return soot.jimple.Jimple.v().newLtExpr(lVal, rVal);
    }
    if (operator == polyglot.ast.Binary.NE){
        return soot.jimple.Jimple.v().newNeExpr(lVal, rVal);
    }
    throw new RuntimeException("Unknown Comparison Expr");
}
/**
 * in bytecode and Jimple the conditions in conditional binary
 * expressions are often reversed
 *
 * Builds the logical negation of the given condition over the same
 * operands, preserving any tags on the operand boxes.
 */
private soot.Value reverseCondition(soot.jimple.ConditionExpr cond) {
    soot.Value op1 = cond.getOp1();
    soot.Value op2 = cond.getOp2();
    soot.jimple.ConditionExpr reversed;
    if (cond instanceof soot.jimple.EqExpr) {
        reversed = soot.jimple.Jimple.v().newNeExpr(op1, op2);
    }
    else if (cond instanceof soot.jimple.NeExpr) {
        reversed = soot.jimple.Jimple.v().newEqExpr(op1, op2);
    }
    else if (cond instanceof soot.jimple.GtExpr) {
        reversed = soot.jimple.Jimple.v().newLeExpr(op1, op2);
    }
    else if (cond instanceof soot.jimple.GeExpr) {
        reversed = soot.jimple.Jimple.v().newLtExpr(op1, op2);
    }
    else if (cond instanceof soot.jimple.LtExpr) {
        reversed = soot.jimple.Jimple.v().newGeExpr(op1, op2);
    }
    else if (cond instanceof soot.jimple.LeExpr) {
        reversed = soot.jimple.Jimple.v().newGtExpr(op1, op2);
    }
    else {
        throw new RuntimeException("Unknown Condition Expr");
    }
    // carry over position tags attached to the original operand boxes
    reversed.getOp1Box().addAllTagsOf(cond.getOp1Box());
    reversed.getOp2Box().addAllTagsOf(cond.getOp2Box());
    return reversed;
}
/**
 * Special conditions for doubles and floats and longs
 *
 * Wide operands cannot be compared directly by an if in Jimple: emit a
 * cmp/cmpl/cmpg into a byte local first, then rebuild the original
 * comparison against that result and zero. Int-sized operands are
 * returned unchanged.
 */
private soot.Value handleDFLCond(soot.jimple.ConditionExpr cond){
    soot.Local result = lg.generateLocal(soot.ByteType.v());
    soot.Value op1 = cond.getOp1();
    soot.Value op2 = cond.getOp2();
    soot.jimple.Expr cmpExpr;
    if (isDouble(op1) || isDouble(op2) || isFloat(op1) || isFloat(op2)) {
        // cmpg for >= and >, cmpl for the rest (the two differ on NaN)
        if ((cond instanceof soot.jimple.GeExpr) || (cond instanceof soot.jimple.GtExpr)) {
            cmpExpr = soot.jimple.Jimple.v().newCmpgExpr(op1, op2);
        }
        else {
            cmpExpr = soot.jimple.Jimple.v().newCmplExpr(op1, op2);
        }
    }
    else if (isLong(op1) || isLong(op2)) {
        cmpExpr = soot.jimple.Jimple.v().newCmpExpr(op1, op2);
    }
    else {
        // int-sized operands can be compared directly
        return cond;
    }
    soot.jimple.Stmt cmpAssign = soot.jimple.Jimple.v().newAssignStmt(result, cmpExpr);
    body.getUnits().add(cmpAssign);
    soot.jimple.IntConstant zero = soot.jimple.IntConstant.v(0);
    if (cond instanceof soot.jimple.EqExpr){
        return soot.jimple.Jimple.v().newEqExpr(result, zero);
    }
    if (cond instanceof soot.jimple.GeExpr){
        return soot.jimple.Jimple.v().newGeExpr(result, zero);
    }
    if (cond instanceof soot.jimple.GtExpr){
        return soot.jimple.Jimple.v().newGtExpr(result, zero);
    }
    if (cond instanceof soot.jimple.LeExpr){
        return soot.jimple.Jimple.v().newLeExpr(result, zero);
    }
    if (cond instanceof soot.jimple.LtExpr){
        return soot.jimple.Jimple.v().newLtExpr(result, zero);
    }
    if (cond instanceof soot.jimple.NeExpr){
        return soot.jimple.Jimple.v().newNeExpr(result, zero);
    }
    throw new RuntimeException("Unknown Comparison Expr");
}
/** Returns true when the value's Jimple type is double. */
private boolean isDouble(soot.Value val) {
    return val.getType() instanceof soot.DoubleType;
}
/** Returns true when the value's Jimple type is float. */
private boolean isFloat(soot.Value val) {
    return val.getType() instanceof soot.FloatType;
}
/** Returns true when the value's Jimple type is long. */
private boolean isLong(soot.Value val) {
    return val.getType() instanceof soot.LongType;
}
/**
 * Creates a conditional AND expr
 *
 * Lowering: each operand's condition is reversed so the emitted if jumps
 * to noop1 (the "false" label) as soon as an operand is false; when both
 * operands fall through, retLocal is set to 1, otherwise to 0 at noop1.
 */
private soot.Local createCondAnd(polyglot.ast.Binary binary) {
soot.Local retLocal = lg.generateLocal(soot.BooleanType.v());
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
soot.Value lVal = createExpr(binary.left());
// needSootIf decides whether a branch must be emitted for this value
// (helper not shown in this chunk)
boolean leftNeedIf = needSootIf(lVal);
if (!(lVal instanceof soot.jimple.ConditionExpr)) {
// plain value: it is false exactly when == 0, so branch on that
lVal = soot.jimple.Jimple.v().newEqExpr(lVal, soot.jimple.IntConstant.v(0));
}
else {
// reverse so the branch is taken when the operand is FALSE
lVal = reverseCondition((soot.jimple.ConditionExpr)lVal);
lVal = handleDFLCond((soot.jimple.ConditionExpr)lVal);
}
if (leftNeedIf){
soot.jimple.IfStmt ifLeft = soot.jimple.Jimple.v().newIfStmt(lVal, noop1);
body.getUnits().add(ifLeft);
Util.addLnPosTags(ifLeft.getConditionBox(), binary.left().position());
}
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
// the right operand is only evaluated when the left one fell through,
// which preserves Java's short-circuit semantics
soot.Value rVal = createExpr(binary.right());
boolean rightNeedIf = needSootIf(rVal);
if (!(rVal instanceof soot.jimple.ConditionExpr)) {
rVal = soot.jimple.Jimple.v().newEqExpr(rVal, soot.jimple.IntConstant.v(0));
}
else {
rVal = reverseCondition((soot.jimple.ConditionExpr)rVal);
rVal = handleDFLCond((soot.jimple.ConditionExpr)rVal);
}
if (rightNeedIf){
soot.jimple.IfStmt ifRight = soot.jimple.Jimple.v().newIfStmt(rVal, noop1);
body.getUnits().add(ifRight);
Util.addLnPosTags(ifRight.getConditionBox(), binary.right().position());
}
// both operands true: result 1, then skip over the false branch
soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(1));
body.getUnits().add(assign1);
soot.jimple.Stmt gotoEnd1 = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(gotoEnd1);
// false branch: result 0
body.getUnits().add(noop1);
soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(0));
body.getUnits().add(assign2);
body.getUnits().add(endNoop);
return retLocal;
}
/**
 * Creates a conditional OR expr
 *
 * Lowering: unlike createCondAnd, operand conditions are NOT reversed
 * here — the emitted if jumps to noop1 (the "true" label) as soon as an
 * operand is true; when both operands fall through, retLocal is set to 0,
 * otherwise to 1 at noop1.
 */
private soot.Local createCondOr(polyglot.ast.Binary binary) {
soot.Local retLocal = lg.generateLocal(soot.BooleanType.v());
//end
soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
soot.Value lVal = createExpr(binary.left());
// needSootIf decides whether a branch must be emitted for this value
// (helper not shown in this chunk)
boolean leftNeedIf = needSootIf(lVal);
if (!(lVal instanceof soot.jimple.ConditionExpr)) {
// plain value: it is true exactly when == 1, so branch on that
lVal = soot.jimple.Jimple.v().newEqExpr(lVal, soot.jimple.IntConstant.v(1));
}
else {
lVal = handleDFLCond((soot.jimple.ConditionExpr)lVal);
}
if (leftNeedIf){
soot.jimple.IfStmt ifLeft = soot.jimple.Jimple.v().newIfStmt(lVal, noop1);
body.getUnits().add(ifLeft);
Util.addLnPosTags(ifLeft, binary.left().position());
Util.addLnPosTags(ifLeft.getConditionBox(), binary.left().position());
}
// the right operand is only evaluated when the left one fell through,
// which preserves Java's short-circuit semantics
soot.Value rVal = createExpr(binary.right());
boolean rightNeedIf = needSootIf(rVal);
if (!(rVal instanceof soot.jimple.ConditionExpr)) {
rVal = soot.jimple.Jimple.v().newEqExpr(rVal, soot.jimple.IntConstant.v(1));
}
else {
rVal = handleDFLCond((soot.jimple.ConditionExpr)rVal);
}
if (rightNeedIf){
soot.jimple.IfStmt ifRight = soot.jimple.Jimple.v().newIfStmt(rVal, noop1);
body.getUnits().add(ifRight);
Util.addLnPosTags(ifRight, binary.right().position());
Util.addLnPosTags(ifRight.getConditionBox(), binary.right().position());
}
// neither operand was true: result 0, then skip over the true branch
soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(0));
body.getUnits().add(assign2);
Util.addLnPosTags(assign2, binary.position());
soot.jimple.Stmt gotoEnd2 = soot.jimple.Jimple.v().newGotoStmt(endNoop);
body.getUnits().add(gotoEnd2);
// true branch: result 1
body.getUnits().add(noop1);
soot.jimple.Stmt assign3 = soot.jimple.Jimple.v().newAssignStmt(retLocal, soot.jimple.IntConstant.v(1));
body.getUnits().add(assign3);
Util.addLnPosTags(assign3, binary.position());
body.getUnits().add(endNoop);
return retLocal;
}
/**
 * Materializes a condition expression as a boolean local:
 * if (cond) bool = 1; else bool = 0;
 */
private soot.Local handleCondBinExpr(soot.jimple.ConditionExpr condExpr) {
    soot.Local boolLocal = lg.generateLocal(soot.BooleanType.v());
    soot.jimple.Stmt trueNoop = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt endNoop = soot.jimple.Jimple.v().newNopStmt();
    // wide operands (double/float/long) need a cmp rewrite first
    soot.Value fixedCond = handleDFLCond(condExpr);
    soot.jimple.Stmt branch = soot.jimple.Jimple.v().newIfStmt(fixedCond, trueNoop);
    body.getUnits().add(branch);
    body.getUnits().add(soot.jimple.Jimple.v().newAssignStmt(boolLocal, soot.jimple.IntConstant.v(0)));
    body.getUnits().add(soot.jimple.Jimple.v().newGotoStmt(endNoop));
    body.getUnits().add(trueNoop);
    body.getUnits().add(soot.jimple.Jimple.v().newAssignStmt(boolLocal, soot.jimple.IntConstant.v(1)));
    body.getUnits().add(endNoop);
    return boolLocal;
}
/**
 * Emits "sb = new StringBuffer(); sb.<init>();" and returns the buffer
 * local used to accumulate a string concatenation.
 */
private soot.Local createStringBuffer(polyglot.ast.Expr expr){
    soot.RefType sbType = soot.RefType.v("java.lang.StringBuffer");
    soot.Local sbLocal = lg.generateLocal(sbType);
    soot.jimple.NewExpr newExpr = soot.jimple.Jimple.v().newNewExpr(sbType);
    soot.jimple.Stmt newAssign = soot.jimple.Jimple.v().newAssignStmt(sbLocal, newExpr);
    body.getUnits().add(newAssign);
    Util.addLnPosTags(newAssign, expr.position());
    // invoke the no-arg constructor on the fresh buffer
    soot.SootClass sbClass = soot.Scene.v().getSootClass("java.lang.StringBuffer");
    soot.SootMethod ctor = getMethodFromClass(sbClass, "<init>", new ArrayList(), soot.VoidType.v());
    soot.jimple.SpecialInvokeExpr ctorInvoke = soot.jimple.Jimple.v().newSpecialInvokeExpr(sbLocal, ctor);
    soot.jimple.Stmt ctorStmt = soot.jimple.Jimple.v().newInvokeStmt(ctorInvoke);
    body.getUnits().add(ctorStmt);
    Util.addLnPosTags(ctorStmt, expr.position());
    return sbLocal;
}
/**
 * Emits "result = sb.toString();" on the concatenation buffer and returns
 * the resulting String local.
 */
private soot.Local createToString(soot.Local sb, polyglot.ast.Expr expr){
    soot.Local resultLocal = lg.generateLocal(soot.RefType.v("java.lang.String"));
    soot.SootClass sbClass = soot.Scene.v().getSootClass("java.lang.StringBuffer");
    soot.SootMethod toStringMeth = getMethodFromClass(sbClass, "toString", new ArrayList(), soot.RefType.v("java.lang.String"));
    soot.jimple.VirtualInvokeExpr toStringInvoke = soot.jimple.Jimple.v().newVirtualInvokeExpr(sb, toStringMeth);
    soot.jimple.Stmt toStringAssign = soot.jimple.Jimple.v().newAssignStmt(resultLocal, toStringInvoke);
    body.getUnits().add(toStringAssign);
    Util.addLnPosTags(toStringAssign, expr.position());
    return resultLocal;
}
/**
 * Returns true when the expression is a string concatenation: a "+"
 * binary or a "+=" assign whose static type is java.lang.String.
 */
private boolean isStringConcat(polyglot.ast.Expr expr){
    if (expr instanceof polyglot.ast.Binary) {
        polyglot.ast.Binary bin = (polyglot.ast.Binary)expr;
        return (bin.operator() == polyglot.ast.Binary.ADD)
            && bin.type().toString().equals("java.lang.String");
    }
    if (expr instanceof polyglot.ast.Assign) {
        polyglot.ast.Assign assign = (polyglot.ast.Assign)expr;
        return (assign.operator() == polyglot.ast.Assign.ADD_ASSIGN)
            && assign.type().toString().equals("java.lang.String");
    }
    return false;
}
/**
 * Generates one part of a concatenation String
 *
 * Recurses into nested concatenations; for a leaf expression it selects
 * the StringBuffer.append overload matching the value's type and emits the
 * append call on sb.
 */
private void generateAppends(polyglot.ast.Expr expr, soot.Local sb) {
if (isStringConcat(expr)){
// still a concatenation: append both halves, left first
if (expr instanceof polyglot.ast.Binary){
generateAppends(((polyglot.ast.Binary)expr).left(), sb);
generateAppends(((polyglot.ast.Binary)expr).right(), sb);
}
else {
generateAppends(((polyglot.ast.Assign)expr).left(), sb);
generateAppends(((polyglot.ast.Assign)expr).right(), sb);
}
}
else {
soot.Value toApp = createExpr(expr);
// appendType selects which StringBuffer.append overload is invoked
soot.Type appendType = null;
if (toApp instanceof soot.jimple.StringConstant) {
appendType = soot.RefType.v("java.lang.String");
}
else if (toApp instanceof soot.jimple.Constant) {
appendType = toApp.getType();
}
else if (toApp instanceof soot.Local) {
if (((soot.Local)toApp).getType() instanceof soot.PrimType) {
appendType = ((soot.Local)toApp).getType();
}
else if (((soot.Local)toApp).getType() instanceof soot.RefType) {
if (((soot.Local)toApp).getType().toString().equals("java.lang.String")){
appendType = soot.RefType.v("java.lang.String");
}
else if (((soot.Local)toApp).getType().toString().equals("java.lang.StringBuffer")){
appendType = soot.RefType.v("java.lang.StringBuffer");
}
else{
// any other reference type goes through append(Object)
appendType = soot.RefType.v("java.lang.Object");
}
}
else {
// this is for arrays
appendType = soot.RefType.v("java.lang.Object");
}
}
else if (toApp instanceof soot.jimple.ConditionExpr) {
// conditions are materialized as boolean locals before appending
toApp = handleCondBinExpr((soot.jimple.ConditionExpr)toApp);
appendType = soot.BooleanType.v();
}
// handle shorts
// (StringBuffer has no append(short)/append(byte); widen to int)
if (appendType instanceof soot.ShortType || appendType instanceof soot.ByteType) {
soot.Local intLocal = lg.generateLocal(soot.IntType.v());
soot.jimple.Expr cast = soot.jimple.Jimple.v().newCastExpr(toApp, soot.IntType.v());
soot.jimple.Stmt castAssign = soot.jimple.Jimple.v().newAssignStmt(intLocal, cast);
body.getUnits().add(castAssign);
toApp = intLocal;
appendType = soot.IntType.v();
}
ArrayList paramsTypes = new ArrayList();
paramsTypes.add(appendType);
ArrayList params = new ArrayList();
params.add(toApp);
soot.SootClass classToInvoke = soot.Scene.v().getSootClass("java.lang.StringBuffer");
soot.SootMethod methodToInvoke = getMethodFromClass(classToInvoke, "append", paramsTypes, soot.RefType.v("java.lang.StringBuffer"));
soot.jimple.VirtualInvokeExpr appendInvoke = soot.jimple.Jimple.v().newVirtualInvokeExpr(sb, methodToInvoke, params);
soot.jimple.Stmt appendStmt = soot.jimple.Jimple.v().newInvokeStmt(appendInvoke);
body.getUnits().add(appendStmt);
Util.addLnPosTags(appendStmt, expr.position());
}
}
    /**
     * Unary Expression Creation
     *
     * Translates a polyglot unary expression into Jimple statements appended
     * to {@code body} and returns the local holding the expression's value.
     * Handles post/pre increment and decrement, bitwise not, arithmetic
     * negation, unary plus, and logical not.
     */
    private soot.Local getUnaryLocal(polyglot.ast.Unary unary) {
        polyglot.ast.Expr expr = unary.expr();
        polyglot.ast.Unary.Operator op = unary.operator();
        if (op == polyglot.ast.Unary.POST_INC){
            // post-increment: capture the old value (the result of the
            // expression), add 1, then write the new value back.
            soot.Local retLocal = generateLocal(expr.type());
            soot.Value sootExpr = createExpr(expr);
            soot.jimple.AssignStmt preStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
            body.getUnits().add(preStmt);
            soot.jimple.AddExpr addExpr = soot.jimple.Jimple.v().newAddExpr(sootExpr, getConstant(retLocal.getType(), 1));
            Util.addLnPosTags(addExpr.getOp1Box(), expr.position());
            soot.Local local = generateLocal(expr.type());
            soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(local, addExpr);
            body.getUnits().add(stmt);
            Util.addLnPosTags(stmt, expr.position());
            soot.jimple.AssignStmt aStmt = soot.jimple.Jimple.v().newAssignStmt(sootExpr, local);
            body.getUnits().add(aStmt);
            Util.addLnPosTags(aStmt, expr.position());
            Util.addLnPosTags(aStmt, unary.position());
            // fields and array elements additionally need the new value
            // stored back through a proper LHS reference
            if ((expr instanceof polyglot.ast.Field) || (expr instanceof polyglot.ast.ArrayAccess)) {
                soot.Value actualUnaryExpr = createLHS(expr);
                soot.jimple.AssignStmt s = soot.jimple.Jimple.v().newAssignStmt(actualUnaryExpr, local);
                body.getUnits().add(s);
                Util.addLnPosTags(s, expr.position());
                Util.addLnPosTags(s.getLeftOpBox(), expr.position());
            }
            return retLocal;
        }
        else if (op == polyglot.ast.Unary.POST_DEC) {
            // post-decrement: mirror of POST_INC using subtraction
            soot.Local retLocal = generateLocal(expr.type());
            soot.Value sootExpr = createExpr(expr);
            soot.jimple.AssignStmt preStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
            body.getUnits().add(preStmt);
            soot.jimple.SubExpr subExpr = soot.jimple.Jimple.v().newSubExpr(sootExpr, getConstant(retLocal.getType(), 1));
            Util.addLnPosTags(subExpr.getOp1Box(), expr.position());
            soot.Local local = generateLocal(expr.type());
            soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(local, subExpr);
            body.getUnits().add(stmt);
            Util.addLnPosTags(stmt, expr.position());
            soot.jimple.AssignStmt aStmt = soot.jimple.Jimple.v().newAssignStmt(sootExpr, local);
            body.getUnits().add(aStmt);
            Util.addLnPosTags(aStmt, expr.position());
            Util.addLnPosTags(aStmt, unary.position());
            if ((expr instanceof polyglot.ast.Field) || (expr instanceof polyglot.ast.ArrayAccess)) {
                soot.Value actualUnaryExpr = createLHS(expr);
                soot.jimple.AssignStmt s = soot.jimple.Jimple.v().newAssignStmt(actualUnaryExpr, local);
                body.getUnits().add(s);
                Util.addLnPosTags(s, expr.position());
                Util.addLnPosTags(s.getLeftOpBox(), expr.position());
            }
            return retLocal;
        }
        else if (op == polyglot.ast.Unary.PRE_INC) {
            // pre-increment: add 1 first, then the incremented value is
            // both stored back and returned as the result
            soot.Value sootExpr = createExpr(expr);
            soot.jimple.AddExpr addExpr = soot.jimple.Jimple.v().newAddExpr(sootExpr, getConstant(sootExpr.getType(), 1));
            Util.addLnPosTags(addExpr.getOp1Box(), expr.position());
            soot.Local local = generateLocal(expr.type());
            soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(local, addExpr);
            body.getUnits().add(stmt);
            Util.addLnPosTags(stmt, expr.position());
            if ((expr instanceof polyglot.ast.Field) || (expr instanceof polyglot.ast.ArrayAccess) || (expr instanceof polyglot.ast.Local)) {
                soot.Value actualUnaryExpr = createLHS(expr);
                body.getUnits().add(soot.jimple.Jimple.v().newAssignStmt(actualUnaryExpr, local));
            }
            return local;
        }
        else if (op == polyglot.ast.Unary.PRE_DEC) {
            // pre-decrement: mirror of PRE_INC using subtraction
            soot.Value sootExpr = createExpr(expr);
            soot.jimple.SubExpr subExpr = soot.jimple.Jimple.v().newSubExpr(sootExpr, getConstant(sootExpr.getType(), 1));
            Util.addLnPosTags(subExpr.getOp1Box(), expr.position());
            soot.Local local = generateLocal(expr.type());
            soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(local, subExpr);
            body.getUnits().add(stmt);
            Util.addLnPosTags(stmt, expr.position());
            if ((expr instanceof polyglot.ast.Field) || (expr instanceof polyglot.ast.ArrayAccess) || (expr instanceof polyglot.ast.Local)) {
                soot.Value actualUnaryExpr = createLHS(expr);
                body.getUnits().add(soot.jimple.Jimple.v().newAssignStmt(actualUnaryExpr, local));
            }
            return local;
        }
        else if (op == polyglot.ast.Unary.BIT_NOT) {
            // ~x is lowered to x ^ -1 (Jimple has no bitwise-not expression)
            // NOTE(review): int1 is never used - getConstant(..., -1) below
            // supplies the constant instead
            soot.jimple.IntConstant int1 = soot.jimple.IntConstant.v(-1);
            soot.Local retLocal = generateLocal(expr.type());
            soot.Value sootExpr = createExpr(expr);
            soot.jimple.XorExpr xor = soot.jimple.Jimple.v().newXorExpr(sootExpr, getConstant(sootExpr.getType(), -1));
            Util.addLnPosTags(xor.getOp1Box(), expr.position());
            soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, xor);
            body.getUnits().add(assign1);
            Util.addLnPosTags(assign1, unary.position());
            return retLocal;
        }
        else if (op == polyglot.ast.Unary.NEG) {
            soot.Value sootExpr;
            // negated numeric literals are folded into a constant directly
            if (expr instanceof polyglot.ast.NumLit) {
                int intVal = (int)((polyglot.ast.NumLit)expr).longValue();
                sootExpr = soot.jimple.IntConstant.v(-intVal);
            }
            else if (expr instanceof polyglot.ast.FloatLit){
                double doubleVal = ((polyglot.ast.FloatLit)expr).value();
                if (((polyglot.ast.FloatLit)expr).kind() == polyglot.ast.FloatLit.DOUBLE){
                    sootExpr = soot.jimple.DoubleConstant.v(-doubleVal);
                }
                else {
                    sootExpr = soot.jimple.FloatConstant.v(-(float)doubleVal);
                }
            }
            else {
                // general case: emit a NegExpr over the evaluated operand
                soot.Value local = createExpr(expr);
                soot.jimple.NegExpr negExpr = soot.jimple.Jimple.v().newNegExpr(local);
                sootExpr = negExpr;
                Util.addLnPosTags(negExpr.getOpBox(), expr.position());
            }
            soot.Local retLocal = generateLocal(expr.type());
            soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
            body.getUnits().add(assign);
            Util.addLnPosTags(assign, expr.position());
            return retLocal;
        }
        else if (op == polyglot.ast.Unary.POS) {
            // unary plus is a no-op: just copy the value into a fresh local
            soot.Local retLocal = generateLocal(expr.type());
            soot.Value sootExpr = createExpr(expr);
            soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootExpr);
            body.getUnits().add(assign);
            Util.addLnPosTags(assign, expr.position());
            return retLocal;
        }
        else if (op == polyglot.ast.Unary.NOT) {
            // logical not is lowered to a branch:
            //   if (v != 0) goto L1; ret = 1; goto L2; L1: ret = 0; L2:
            soot.Value local = createExpr(expr);
            if (local instanceof soot.jimple.ConditionExpr){
                local = handleCondBinExpr((soot.jimple.ConditionExpr)local);
            }
            soot.jimple.NeExpr neExpr = soot.jimple.Jimple.v().newNeExpr(local, getConstant(local.getType(), 0));
            soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
            soot.jimple.Stmt ifStmt = soot.jimple.Jimple.v().newIfStmt(neExpr, noop1);
            body.getUnits().add(ifStmt);
            Util.addLnPosTags(ifStmt, expr.position());
            soot.Local retLocal = lg.generateLocal(local.getType());
            soot.jimple.Stmt assign1 = soot.jimple.Jimple.v().newAssignStmt(retLocal, getConstant(retLocal.getType(), 1));
            body.getUnits().add(assign1);
            Util.addLnPosTags(assign1, expr.position());
            soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
            soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
            body.getUnits().add(goto1);
            body.getUnits().add(noop1);
            soot.jimple.Stmt assign2 = soot.jimple.Jimple.v().newAssignStmt(retLocal, getConstant(retLocal.getType(), 0));
            body.getUnits().add(assign2);
            Util.addLnPosTags(assign2, expr.position());
            body.getUnits().add(noop2);
            return retLocal;
        }
        else {
            throw new RuntimeException("Unhandled Unary Expr");
        }
    }
/**
* Returns a needed constant given a type and val
*/
private soot.jimple.Constant getConstant(soot.Type type, int val) {
if (type instanceof soot.DoubleType) {
return soot.jimple.DoubleConstant.v(val);
}
else if (type instanceof soot.FloatType) {
return soot.jimple.FloatConstant.v(val);
}
else if (type instanceof soot.LongType) {
return soot.jimple.LongConstant.v(val);
}
else {
return soot.jimple.IntConstant.v(val);
}
}
/**
* Cast Expression Creation
*/
private soot.Value getCastLocal(polyglot.ast.Cast castExpr){
//System.out.println("castExpr: "+castExpr);
//System.out.println("castExpr type: "+castExpr.type());
//System.out.println("castExpr.expr type: "+castExpr.expr().type());
// if its already the right type
if (castExpr.expr().type().equals(castExpr.type())) {
return createExpr(castExpr.expr());
}
//else
soot.Value val;
/*if (castExpr.expr() instanceof polyglot.ast.Cast) {
val = createExpr(((polyglot.ast.Cast)castExpr.expr()).expr());
}
else {*/
val = createExpr(castExpr.expr());
soot.Type type = Util.getSootType(castExpr.type());
//System.out.println("soot type: "+type);
soot.jimple.CastExpr cast = soot.jimple.Jimple.v().newCastExpr(val, type);
//System.out.println("cast: "+cast+" cast type: "+cast.getCastType());
Util.addLnPosTags(cast.getOpBox(), castExpr.expr().position());
soot.Local retLocal = lg.generateLocal(cast.getCastType());
soot.jimple.Stmt castAssign = soot.jimple.Jimple.v().newAssignStmt(retLocal, cast);
body.getUnits().add(castAssign);
//System.out.println("castAssign: "+castAssign);
Util.addLnPosTags(castAssign, castExpr.position());
return retLocal;
}
/**
* Procedure Call Helper Methods
* Returns list of params
*/
private ArrayList getSootParams(polyglot.ast.ProcedureCall call) {
ArrayList sootParams = new ArrayList();
Iterator it = call.arguments().iterator();
while (it.hasNext()) {
polyglot.ast.Expr next = (polyglot.ast.Expr)it.next();
soot.Value nextExpr = createExpr(next);
if (nextExpr instanceof soot.jimple.ConditionExpr){
nextExpr = handleCondBinExpr((soot.jimple.ConditionExpr)nextExpr);
}
sootParams.add(nextExpr);
}
return sootParams;
}
/**
* Returns list of param types
*/
private ArrayList getSootParamsTypes(polyglot.ast.ProcedureCall call) {
ArrayList sootParamsTypes = new ArrayList();
Iterator it = call.procedureInstance().formalTypes().iterator();
while (it.hasNext()) {
Object next = it.next();
sootParamsTypes.add(Util.getSootType((polyglot.types.Type)next));
}
return sootParamsTypes;
}
/**
* Gets the Soot Method form the given Soot Class
*/
private soot.SootMethod getMethodFromClass(soot.SootClass sootClass, String name, ArrayList paramTypes, soot.Type returnType) {
//System.out.println("soot class: "+sootClass.getName());
//System.out.println("has meths: "+sootClass.getMethods());
//System.out.println("method name: "+name);
//System.out.println("method param types: "+paramTypes);
try {
//if (sootClass.declaresMethod(name, paramTypes, returnType)){
return sootClass.getMethod(name, paramTypes, returnType);
}
catch(Exception e){
//else {
// its possible we're looking for a init method for an inner
// class and we have a subclass as the outerclass this ref param
// type but the actually method uses the super class
if (paramTypes != null && !paramTypes.isEmpty()){
soot.SootClass firstParam = ((soot.RefType)paramTypes.get(0)).getSootClass();
boolean foundMeth = false;
while (!foundMeth){
paramTypes.set(0, firstParam.getSuperclass().getType());
if (sootClass.declaresMethod(name, paramTypes, returnType)){
return sootClass.getMethod(name, paramTypes, returnType);
}
firstParam = firstParam.getSuperclass();
}
}
}
return null;
}
/**
* Adds extra params
*/
private void handleFinalLocalParams(ArrayList sootParams, ArrayList sootParamTypes, polyglot.types.ClassType keyType){
HashMap finalLocalInfo = soot.javaToJimple.InitialResolver.v().finalLocalInfo();
if (finalLocalInfo != null){
if (finalLocalInfo.containsKey(new polyglot.util.IdentityKey(keyType))){
AnonLocalClassInfo alci = (AnonLocalClassInfo)finalLocalInfo.get(new polyglot.util.IdentityKey(keyType));
ArrayList finalLocals = alci.finalLocals();
if (finalLocals != null){
Iterator it = finalLocals.iterator();
while (it.hasNext()){
polyglot.types.LocalInstance li = (polyglot.types.LocalInstance)((polyglot.util.IdentityKey)it.next()).object();
sootParamTypes.add(Util.getSootType(li.type()));
sootParams.add(getLocal(li));
}
}
}
}
}
    // Returns a local holding the "this" reference for the given type,
    // delegating to Util.getThis (which caches results in getThisMap and
    // may emit statements into the body to reach an enclosing instance).
    private soot.Local getThis(soot.Type sootType){
        return Util.getThis(sootType, body, getThisMap, lg);
    }
/**
* adds outer class params
*/
private void handleOuterClassParams(ArrayList sootParams, ArrayList sootParamsTypes, polyglot.types.ClassType typeToInvoke){
ArrayList needsRef = soot.javaToJimple.InitialResolver.v().getHasOuterRefInInit();
if ((needsRef != null) && (needsRef.contains(Util.getSootType(typeToInvoke)))){
soot.SootClass outerClass = ((soot.RefType)Util.getSootType(typeToInvoke.outer())).getSootClass();
soot.Local classToInvokeOuterParam = getThis(outerClass.getType());
sootParamsTypes.add(outerClass.getType());
sootParams.add(classToInvokeOuterParam);
}
}
    /**
     * Constructor Call Creation
     *
     * Handles explicit this(...) and super(...) constructor invocations,
     * including the synthetic extra arguments (outer-class this reference,
     * captured final locals) required by inner/local classes. After a
     * super(...) inside an {@code <init>}, the deferred outer-this, final
     * local, field, and initializer-block initialization is emitted.
     */
    private void createConstructorCall(polyglot.ast.ConstructorCall cCall) {
        ArrayList sootParams = new ArrayList();
        ArrayList sootParamsTypes = new ArrayList();
        polyglot.types.ConstructorInstance cInst = cCall.constructorInstance();
        // NOTE(review): containerName is computed but never used below
        String containerName = null;
        if (cInst.container() instanceof polyglot.types.ClassType) {
            containerName = ((polyglot.types.ClassType)cInst.container()).fullName();
        }
        // super(...) targets the declared container's class;
        // this(...) targets the current declaring class
        soot.SootClass classToInvoke;
        if (cCall.kind() == polyglot.ast.ConstructorCall.SUPER) {
            classToInvoke = ((soot.RefType)Util.getSootType(cInst.container())).getSootClass();
        }
        else if (cCall.kind() == polyglot.ast.ConstructorCall.THIS) {
            classToInvoke = body.getMethod().getDeclaringClass();
        }
        else {
            throw new RuntimeException("Unknown kind of Constructor Call");
        }
        soot.Local base = specialThisLocal;
        if (cCall.qualifier() != null){
            polyglot.types.ClassType objType = (polyglot.types.ClassType)cInst.container();
            if ((objType.outer() != null) && (body.getMethod().getDeclaringClass().equals(((soot.RefType)Util.getSootType(objType.outer())).getSootClass()))){
                // qualifier denotes our own outer instance - pass it as the
                // synthetic outer-this parameter
                handleOuterClassParams(sootParams, sootParamsTypes, objType);
            }
            else {
                soot.Local qVal = (soot.Local)createExpr(cCall.qualifier());
                sootParams.add(qVal);
                sootParamsTypes.add(qVal.getType());
                // emits qVal.getClass() - presumably to force a
                // NullPointerException for a null qualifier, as required by
                // the JLS for qualified instantiation; confirm
                body.getUnits().add(soot.jimple.Jimple.v().newInvokeStmt(soot.jimple.Jimple.v().newVirtualInvokeExpr(qVal, soot.Scene.v().getSootClass("java.lang.Object").getMethodByName("getClass"), new ArrayList())));
            }
        }
        else {
            handleOuterClassParams(sootParams, sootParamsTypes, (polyglot.types.ClassType)cInst.container());
        }
        // a '$' in the class name marks an inner class, which may need the
        // captured final locals appended as trailing parameters
        int index = classToInvoke.getName().lastIndexOf("$");
        sootParams.addAll(getSootParams(cCall));
        sootParamsTypes.addAll(getSootParamsTypes(cCall));
        if (index != -1){
            // fix this to send the final locals list instead of empty array
            handleFinalLocalParams(sootParams, sootParamsTypes, (polyglot.types.ClassType)cCall.constructorInstance().container());
        }
        soot.SootMethod methodToInvoke = getMethodFromClass(classToInvoke, "<init>", sootParamsTypes, soot.VoidType.v());
        soot.jimple.SpecialInvokeExpr specialInvokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(base, methodToInvoke, sootParams);
        soot.jimple.Stmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(specialInvokeExpr);
        body.getUnits().add(invokeStmt);
        Util.addLnPosTags(invokeStmt, cCall.position());
        // tag each explicit (source-level) argument with its position
        int numParams = 0;
        Iterator invokeParamsIt = cCall.arguments().iterator();
        while (invokeParamsIt.hasNext()) {
            Util.addLnPosTags(specialInvokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)invokeParamsIt.next()).position());
            numParams++;
        }
        // if method is <init> handle field inits
        if (body.getMethod().getName().equals("<init>") && (cCall.kind() == polyglot.ast.ConstructorCall.SUPER)){
            handleOuterClassThisInit(body.getMethod());
            handleFinalLocalInits();
            handleFieldInits(body.getMethod());
            handleInitializerBlocks(body.getMethod());
        }
    }
private void handleFinalLocalInits(){
ArrayList finalsList = ((PolyglotMethodSource)body.getMethod().getSource()).getFinalsList();
if (finalsList == null) return;
int paramCount = paramRefCount - finalsList.size();
Iterator it = finalsList.iterator();
while (it.hasNext()){
soot.SootField sf = (soot.SootField)it.next();
soot.jimple.FieldRef fieldRef = soot.jimple.Jimple.v().newInstanceFieldRef(specialThisLocal, sf);
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(fieldRef, body.getParameterLocal(paramCount));
body.getUnits().add(stmt);
paramCount++;
}
}
    /**
     * Local Class Decl - Local Inner Class
     */
    private void createLocalClassDecl(polyglot.ast.LocalClassDecl cDecl) {
        // NOTE(review): lcMap is fetched but never used and the declaration
        // itself is not processed here - this method appears incomplete or
        // the local class is handled elsewhere via the InitialResolver;
        // confirm before relying on it.
        BiMap lcMap = InitialResolver.v().getLocalClassMap();
    }
    /**
     * New Expression Creation
     *
     * Lowers "new T(args)" (including anonymous classes and qualified
     * instantiation "outer.new Inner(...)") to a Jimple new + specialinvoke
     * of the matching {@code <init>}, returning the local holding the new
     * instance. Synthetic outer-this and captured-final-local arguments are
     * appended as needed.
     */
    private soot.Local getNewLocal(polyglot.ast.New newExpr) {
        // handle parameters/args
        ArrayList sootParams = new ArrayList();
        ArrayList sootParamsTypes = new ArrayList();
        polyglot.types.ClassType objType = (polyglot.types.ClassType)newExpr.objectType().type();
        // anonymous classes instantiate their generated type, not the
        // declared supertype
        if (newExpr.anonType() != null){
            objType = newExpr.anonType();
        }
        soot.RefType sootType = (soot.RefType)Util.getSootType(objType);
        soot.Local retLocal = lg.generateLocal(sootType);
        soot.jimple.NewExpr sootNew = soot.jimple.Jimple.v().newNewExpr(sootType);
        soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, sootNew);
        body.getUnits().add(stmt);
        Util.addLnPosTags(stmt, newExpr.position());
        soot.SootClass classToInvoke = sootType.getSootClass();
        if (newExpr.qualifier() != null){
            if ((objType.outer() != null) && (body.getMethod().getDeclaringClass().equals(((soot.RefType)Util.getSootType(objType.outer())).getSootClass())) && (!soot.Modifier.isStatic(body.getMethod().getModifiers()))){
                // qualifier denotes our own outer instance - pass the
                // synthetic outer-this parameter instead
                handleOuterClassParams(sootParams, sootParamsTypes, objType);
            }
            else {
                soot.Value qVal = createExpr(newExpr.qualifier());
                sootParams.add(qVal);
                sootParamsTypes.add(qVal.getType());
                // emits qVal.getClass() - presumably to force a
                // NullPointerException for a null qualifier; confirm
                body.getUnits().add(soot.jimple.Jimple.v().newInvokeStmt(soot.jimple.Jimple.v().newVirtualInvokeExpr((soot.Local)qVal, soot.Scene.v().getSootClass("java.lang.Object").getMethodByName("getClass"), new ArrayList())));
            }
        }
        else {
            handleOuterClassParams(sootParams, sootParamsTypes, objType);
        }
        sootParams.addAll(getSootParams(newExpr));
        sootParamsTypes.addAll(getSootParamsTypes(newExpr));
        handleFinalLocalParams(sootParams, sootParamsTypes, (polyglot.types.ClassType)objType);
        soot.SootMethod methodToInvoke = getMethodFromClass(classToInvoke, "<init>", sootParamsTypes, soot.VoidType.v());
        // sanity check: the resolved constructor must belong to the class
        // being instantiated
        if (!methodToInvoke.getDeclaringClass().getType().equals(classToInvoke.getType())){
            throw new RuntimeException("created new for type: "+classToInvoke.getType()+" but didn't find needed initializer there instead found initializer in "+methodToInvoke.getDeclaringClass().getType());
        }
        soot.jimple.SpecialInvokeExpr specialInvokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(retLocal, methodToInvoke, sootParams);
        soot.jimple.Stmt invokeStmt = soot.jimple.Jimple.v().newInvokeStmt(specialInvokeExpr);
        body.getUnits().add(invokeStmt);
        Util.addLnPosTags(invokeStmt, newExpr.position());
        // tag each explicit (source-level) argument with its position
        int numParams = 0;
        Iterator invokeParamsIt = newExpr.arguments().iterator();
        while (invokeParamsIt.hasNext()) {
            Util.addLnPosTags(specialInvokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)invokeParamsIt.next()).position());
            numParams++;
        }
        return retLocal;
    }
    /**
     * Call Expression Creation
     *
     * Lowers a method call to the appropriate Jimple invoke expression
     * (static, special, interface, or virtual), selected from the method's
     * flags and the receiver kind. Returns the local holding the call's
     * result, or null for a void call. Private members of an outer class
     * are routed through the resolver's synthetic static access method.
     */
    private soot.Local getCallLocal(polyglot.ast.Call call){
        // handle name
        // NOTE(review): 'name' is never used below - methodInstance.name()
        // is used for the lookup instead
        String name = call.name();
        // handle receiver/target
        polyglot.ast.Receiver receiver = call.target();
        soot.Local baseLocal;
        // qualified super call (Outer.super.m(...)) needs a synthetic
        // accessor and is handled entirely separately
        if ((receiver instanceof polyglot.ast.Special) && (((polyglot.ast.Special)receiver).kind() == polyglot.ast.Special.SUPER) && (((polyglot.ast.Special)receiver).qualifier() != null)){
            baseLocal = getSpecialSuperQualifierLocal(call);
            return baseLocal;
        }
        baseLocal = (soot.Local)getBaseLocal(receiver);
        soot.Type sootRecType = Util.getSootType(receiver.type());
        // non-RefType receivers (e.g. arrays) fall back to java.lang.Object
        soot.SootClass receiverTypeClass = soot.Scene.v().getSootClass("java.lang.Object");
        if (sootRecType instanceof soot.RefType){
            receiverTypeClass = ((soot.RefType)sootRecType).getSootClass();
        }
        polyglot.types.MethodInstance methodInstance = call.methodInstance();
        soot.Type sootRetType = Util.getSootType(methodInstance.returnType());
        ArrayList sootParamsTypes = getSootParamsTypes(call);
        ArrayList sootParams = getSootParams(call);
        soot.SootMethod callMethod = getMethodFromClass(receiverTypeClass, methodInstance.name(), sootParamsTypes, sootRetType);
        boolean isPrivateAccess = false;
        soot.javaToJimple.PolyglotMethodSource ms = (soot.javaToJimple.PolyglotMethodSource)body.getMethod().getSource();
        // private member of an enclosing class: swap in the synthetic
        // access$N method, passing the receiver as an explicit argument
        if ((ms.getPrivateAccessMap() != null) && (ms.getPrivateAccessMap().containsKey(call.methodInstance()))){
            callMethod = (soot.SootMethod)ms.getPrivateAccessMap().get(call.methodInstance());
            if (!call.methodInstance().flags().isStatic()){
                sootParams.add(baseLocal);
            }
            isPrivateAccess = true;
        }
        soot.jimple.InvokeExpr invokeExpr;
        if (isPrivateAccess){
            // for accessing private methods in outer class -> always static
            invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(callMethod, sootParams);
        }
        else if (soot.Modifier.isInterface(receiverTypeClass.getModifiers()) && methodInstance.flags().isAbstract()) {
            // if reciever class is interface and method is abstract -> interface
            invokeExpr = soot.jimple.Jimple.v().newInterfaceInvokeExpr(baseLocal, callMethod, sootParams);
        }
        else if (methodInstance.flags().isStatic()){
            // if flag isStatic -> static invoke
            invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(callMethod, sootParams);
        }
        else if (methodInstance.flags().isPrivate()){
            // if flag isPrivate -> special invoke
            invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(baseLocal, callMethod, sootParams);
        }
        else if ((receiver instanceof polyglot.ast.Special) &&
            (((polyglot.ast.Special)receiver).kind() == polyglot.ast.Special.SUPER)){
            // receiver is special super -> special
            invokeExpr = soot.jimple.Jimple.v().newSpecialInvokeExpr(baseLocal, callMethod, sootParams);
        }
        else {
            // else virtual invoke
            invokeExpr = soot.jimple.Jimple.v().newVirtualInvokeExpr(baseLocal, callMethod, sootParams);
        }
        // tag each source-level argument with its position
        int numParams = 0;
        Iterator callParamsIt = call.arguments().iterator();
        while (callParamsIt.hasNext()) {
            Util.addLnPosTags(invokeExpr.getArgBox(numParams), ((polyglot.ast.Expr)callParamsIt.next()).position());
            numParams++;
        }
        if (invokeExpr instanceof soot.jimple.InstanceInvokeExpr) {
            Util.addLnPosTags(((soot.jimple.InstanceInvokeExpr)invokeExpr).getBaseBox(), call.target().position());
        }
        // create an assign stmt so invoke can be used somewhere else
        if (invokeExpr.getMethod().getReturnType().equals(soot.VoidType.v())) {
            soot.jimple.Stmt invoke = soot.jimple.Jimple.v().newInvokeStmt(invokeExpr);
            body.getUnits().add(invoke);
            Util.addLnPosTags(invoke, call.position());
            return null;
        }
        else {
            soot.Local retLocal = lg.generateLocal(invokeExpr.getMethod().getReturnType());
            soot.jimple.Stmt assignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, invokeExpr);
            // add assign stmt to body
            body.getUnits().add(assignStmt);
            Util.addLnPosTags(assignStmt, call.position());
            return retLocal;
        }
    }
private soot.Value getBaseLocal(polyglot.ast.Receiver receiver) {
if (receiver instanceof polyglot.ast.TypeNode) {
return generateLocal(((polyglot.ast.TypeNode)receiver).type());
}
else {
soot.Value val = createExpr((polyglot.ast.Expr)receiver);
if (val instanceof soot.jimple.Constant) {
soot.Local retLocal = lg.generateLocal(val.getType());
soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, val);
body.getUnits().add(stmt);
return retLocal;
}
return val;
}
}
    /**
     * NewArray Expression Creation
     *
     * Lowers "new T[...]..." to a Jimple newarray (single dimension) or
     * newmultiarray (multiple dimensions), assigned into a fresh local.
     * If the expression carries an initializer, the local is then
     * reassigned to the initialized array.
     */
    private soot.Local getNewArrayLocal(polyglot.ast.NewArray newArrExpr) {
        soot.Type sootType = Util.getSootType(newArrExpr.type());
        soot.jimple.Expr expr;
        if (newArrExpr.numDims() == 1) {
            soot.Value dimLocal;
            // e.g. "new int[][]{...}" - the sized dimension count is 1
            if (newArrExpr.additionalDims() == 1) {
                dimLocal = soot.jimple.IntConstant.v(1);
            }
            else {
                dimLocal = createExpr((polyglot.ast.Expr)newArrExpr.dims().get(0));
            }
            soot.jimple.NewArrayExpr newArrayExpr = soot.jimple.Jimple.v().newNewArrayExpr(((soot.ArrayType)sootType).getElementType(), dimLocal);
            expr = newArrayExpr;
            if (newArrExpr.additionalDims() != 1){
                Util.addLnPosTags(newArrayExpr.getSizeBox(), ((polyglot.ast.Expr)newArrExpr.dims().get(0)).position());
            }
        }
        else {
            // multi-dimensional: evaluate each sized dimension, and record
            // any remaining unsized dimensions as a trailing count
            ArrayList valuesList = new ArrayList();
            Iterator it = newArrExpr.dims().iterator();
            while (it.hasNext()){
                valuesList.add(createExpr((polyglot.ast.Expr)it.next()));
            }
            if (newArrExpr.additionalDims() != 0) {
                valuesList.add(soot.jimple.IntConstant.v(newArrExpr.additionalDims()));
            }
            soot.jimple.NewMultiArrayExpr newMultiArrayExpr = soot.jimple.Jimple.v().newNewMultiArrayExpr((soot.ArrayType)sootType, valuesList);
            expr = newMultiArrayExpr;
            Iterator sizeBoxIt = newArrExpr.dims().iterator();
            int counter = 0;
            while (sizeBoxIt.hasNext()){
                Util.addLnPosTags(newMultiArrayExpr.getSizeBox(counter), ((polyglot.ast.Expr)sizeBoxIt.next()).position());
                counter++;
            }
        }
        soot.Local retLocal = lg.generateLocal(sootType);
        soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, expr);
        body.getUnits().add(stmt);
        Util.addLnPosTags(stmt, newArrExpr.position());
        Util.addLnPosTags(stmt.getRightOpBox(), newArrExpr.position());
        // handle array init if one exists
        if (newArrExpr.init() != null) {
            soot.Value initVal = getArrayInitLocal(newArrExpr.init(), newArrExpr.type());
            soot.jimple.AssignStmt initStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, initVal);
            body.getUnits().add(initStmt);
        }
        return retLocal;
    }
    /**
     * create ArrayInit given init and the array local
     *
     * Allocates an array sized to the initializer's element count and
     * assigns each element in order; nested ArrayInit elements are handled
     * recursively with the base type peeled off lhsType. Returns the local
     * holding the initialized array.
     */
    private soot.Local getArrayInitLocal(polyglot.ast.ArrayInit arrInit, polyglot.types.Type lhsType) {
        soot.Local local = generateLocal(lhsType);
        soot.jimple.NewArrayExpr arrExpr = soot.jimple.Jimple.v().newNewArrayExpr(((soot.ArrayType)local.getType()).getElementType(), soot.jimple.IntConstant.v(arrInit.elements().size()));
        soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(local, arrExpr);
        body.getUnits().add(assign);
        Util.addLnPosTags(assign, arrInit.position());
        Iterator it = arrInit.elements().iterator();
        int index = 0;
        while (it.hasNext()){
            polyglot.ast.Expr elemExpr = (polyglot.ast.Expr)it.next();
            soot.Value elem;
            if (elemExpr instanceof polyglot.ast.ArrayInit){
                // nested initializer: recurse, picking the element type from
                // lhsType when the nested initializer's own type is null
                // (e.g. "{null, ...}"-style untyped nests)
                if (((polyglot.ast.ArrayInit)elemExpr).type() instanceof polyglot.types.NullType) {
                    if (lhsType instanceof polyglot.types.ArrayType){
                        elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, ((polyglot.types.ArrayType)lhsType).base());
                    }
                    else {
                        elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, lhsType);
                    }
                }
                else {
                    elem = getArrayInitLocal((polyglot.ast.ArrayInit)elemExpr, ((polyglot.ast.ArrayInit)elemExpr).type());
                }
            }
            else {
                elem = createExpr(elemExpr);
            }
            // store the element at its positional index
            soot.jimple.ArrayRef arrRef = soot.jimple.Jimple.v().newArrayRef(local, soot.jimple.IntConstant.v(index));
            soot.jimple.AssignStmt elemAssign = soot.jimple.Jimple.v().newAssignStmt(arrRef, elem);
            body.getUnits().add(elemAssign);
            Util.addLnPosTags(elemAssign, elemExpr.position());
            Util.addLnPosTags(elemAssign.getRightOpBox(), elemExpr.position());
            index++;
        }
        return local;
    }
/**
* create LHS expressions
*/
private soot.Value createLHS(polyglot.ast.Expr expr) {
if (expr instanceof polyglot.ast.Local) {
return getLocal((polyglot.ast.Local)expr);
}
else if (expr instanceof polyglot.ast.ArrayAccess) {
return getArrayRefLocalLeft((polyglot.ast.ArrayAccess)expr);
}
else if (expr instanceof polyglot.ast.Field) {
return getFieldLocalLeft((polyglot.ast.Field)expr);
}
else {
throw new RuntimeException("Unhandled LHS");
}
}
/**
* Array Ref Expression Creation - LHS
*/
private soot.Value getArrayRefLocalLeft(polyglot.ast.ArrayAccess arrayRefExpr) {
polyglot.ast.Expr array = arrayRefExpr.array();
polyglot.ast.Expr access = arrayRefExpr.index();
soot.Local arrLocal = (soot.Local)createExpr(array);
soot.Value arrAccess = createExpr(access);
soot.Local retLocal = generateLocal(arrayRefExpr.type());
soot.jimple.ArrayRef ref = soot.jimple.Jimple.v().newArrayRef(arrLocal, arrAccess);
Util.addLnPosTags(ref.getBaseBox(), arrayRefExpr.array().position());
Util.addLnPosTags(ref.getIndexBox(), arrayRefExpr.index().position());
return ref;
}
/**
* Array Ref Expression Creation
*/
private soot.Value getArrayRefLocal(polyglot.ast.ArrayAccess arrayRefExpr) {
polyglot.ast.Expr array = arrayRefExpr.array();
polyglot.ast.Expr access = arrayRefExpr.index();
soot.Local arrLocal = (soot.Local)createExpr(array);
soot.Value arrAccess = createExpr(access);
soot.Local retLocal = generateLocal(arrayRefExpr.type());
soot.jimple.ArrayRef ref = soot.jimple.Jimple.v().newArrayRef(arrLocal, arrAccess);
Util.addLnPosTags(ref.getBaseBox(), arrayRefExpr.array().position());
Util.addLnPosTags(ref.getIndexBox(), arrayRefExpr.index().position());
soot.jimple.Stmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, ref);
body.getUnits().add(stmt);
Util.addLnPosTags(stmt, arrayRefExpr.position());
return retLocal;
}
    // Handles qualified super access (Outer.super.m(...) or Outer.super.f):
    // creates a synthetic static access method in the qualifier's class and
    // invokes it, passing the enclosing instance as the first argument.
    // Returns the result local, or null when the accessor returns void.
    private soot.Local getSpecialSuperQualifierLocal(polyglot.ast.Expr expr){
        soot.SootClass classToInvoke;
        ArrayList methodParams = new ArrayList();
        if (expr instanceof polyglot.ast.Call){
            polyglot.ast.Special target = (polyglot.ast.Special)((polyglot.ast.Call)expr).target();
            classToInvoke = ((soot.RefType)Util.getSootType(target.qualifier().type())).getSootClass();
            methodParams = getSootParams((polyglot.ast.Call)expr);
        }
        else if (expr instanceof polyglot.ast.Field){
            polyglot.ast.Special target = (polyglot.ast.Special)((polyglot.ast.Field)expr).target();
            classToInvoke = ((soot.RefType)Util.getSootType(target.qualifier().type())).getSootClass();
        }
        else {
            throw new RuntimeException("Trying to create special super qualifier for: "+expr+" which is not a field or call");
        }
        // make an access method
        soot.SootMethod methToInvoke = makeSuperAccessMethod(classToInvoke, expr);
        // invoke it
        soot.Local classToInvokeLocal = Util.getThis(classToInvoke.getType(), body, getThisMap, lg);
        // the enclosing instance is passed as the leading argument
        methodParams.add(0, classToInvokeLocal);
        soot.jimple.InvokeExpr invokeExpr = soot.jimple.Jimple.v().newStaticInvokeExpr(methToInvoke, methodParams);
        // return the local of return type if not void
        if (!methToInvoke.getReturnType().equals(soot.VoidType.v())){
            soot.Local retLocal = lg.generateLocal(methToInvoke.getReturnType());
            soot.jimple.AssignStmt stmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, invokeExpr);
            body.getUnits().add(stmt);
            return retLocal;
        }
        else {
            body.getUnits().add(soot.jimple.Jimple.v().newInvokeStmt(invokeExpr));
            return null;
        }
    }
/**
* Special Expression Creation
*/
private soot.Local getSpecialLocal(polyglot.ast.Special specialExpr) {
if (specialExpr.kind() == polyglot.ast.Special.SUPER) {
if (specialExpr.qualifier() == null){
return specialThisLocal;
}
else {
// this isn't enough
// need to getThis for the type which may be several levels up
// add access$N method to class of the type which returns
// field or method wanted
// invoke it
// and it needs to be called specially when getting fields
// or calls because need to know field or method to access
// as it access' a field or meth in the super class of the
// outer class refered to by the qualifier
return getThis(Util.getSootType(specialExpr.qualifier().type()));
}
}
else if (specialExpr.kind() == polyglot.ast.Special.THIS) {
if (specialExpr.qualifier() == null) {
return specialThisLocal;
}
else {
return getThis(Util.getSootType(specialExpr.qualifier().type()));
}
}
else {
throw new RuntimeException("Unknown Special");
}
}
    // Creates and installs a synthetic static access$N00 method in
    // classToInvoke that performs the given member access (a field read or
    // a method call) on behalf of a qualified-super expression. The first
    // parameter of the generated method is the instance to access.
    private soot.SootMethod makeSuperAccessMethod(soot.SootClass classToInvoke, Object memberToAccess){
        // unique synthetic name, per the resolver's global counter
        String name = "access$"+soot.javaToJimple.InitialResolver.v().getNextPrivateAccessCounter()+"00";
        ArrayList paramTypes = new ArrayList();
        paramTypes.add(classToInvoke.getType());
        soot.SootMethod meth;
        soot.MethodSource src;
        if (memberToAccess instanceof polyglot.ast.Field){
            polyglot.ast.Field fieldToAccess = (polyglot.ast.Field)memberToAccess;
            meth = new soot.SootMethod(name, paramTypes, Util.getSootType(fieldToAccess.type()), soot.Modifier.STATIC);
            PrivateFieldAccMethodSource fSrc = new PrivateFieldAccMethodSource();
            fSrc.fieldName(fieldToAccess.name());
            fSrc.fieldType(Util.getSootType(fieldToAccess.type()));
            fSrc.classToInvoke(((soot.RefType)Util.getSootType(fieldToAccess.target().type())).getSootClass());
            src = fSrc;
        }
        else if (memberToAccess instanceof polyglot.ast.Call){
            polyglot.ast.Call methToAccess = (polyglot.ast.Call)memberToAccess;
            // forward the original call's parameter types after the
            // leading instance parameter
            paramTypes.addAll(getSootParamsTypes(methToAccess));
            meth = new soot.SootMethod(name, paramTypes, Util.getSootType(methToAccess.methodInstance().returnType()), soot.Modifier.STATIC);
            PrivateMethodAccMethodSource mSrc = new PrivateMethodAccMethodSource();
            mSrc.setMethodInst(methToAccess.methodInstance());
            src = mSrc;
        }
        else {
            throw new RuntimeException("trying to access unhandled member type: "+memberToAccess);
        }
        classToInvoke.addMethod(meth);
        // build the accessor's body immediately from its method source
        meth.setActiveBody(src.getBody(meth, null));
        return meth;
    }
/**
 * InstanceOf Expression Creation.
 *
 * Lowers {@code expr instanceof T} into a Jimple InstanceOfExpr whose boolean
 * result is stored in a fresh local, tagging both statement and operand with
 * source positions.
 */
private soot.Local getInstanceOfLocal(polyglot.ast.Instanceof instExpr) {
    soot.Type checkedType = Util.getSootType(instExpr.compareType().type());
    // Evaluating the operand may emit statements into the body.
    soot.Value operand = createExpr(instExpr.expr());
    soot.jimple.InstanceOfExpr check = soot.jimple.Jimple.v().newInstanceOfExpr(operand, checkedType);
    soot.Local result = lg.generateLocal(soot.BooleanType.v());
    soot.jimple.Stmt assign = soot.jimple.Jimple.v().newAssignStmt(result, check);
    body.getUnits().add(assign);
    Util.addLnPosTags(assign, instExpr.position());
    Util.addLnPosTags(check.getOpBox(), instExpr.expr().position());
    return result;
}
/**
 * Condition Expression Creation - can maybe merge with If.
 *
 * Lowers a ternary {@code cond ? consequent : alternative} into Jimple:
 * an inverted conditional branch jumps to the alternative block when the
 * condition is false; both branches assign into a single shared local,
 * which is returned.
 */
private soot.Local getConditionalLocal(polyglot.ast.Conditional condExpr){
    // handle cond
    polyglot.ast.Expr condition = condExpr.cond();
    soot.Value sootCond = createExpr(condition);
    boolean needIf = needSootIf(sootCond);
    if (!(sootCond instanceof soot.jimple.ConditionExpr)) {
        // Plain boolean value: branch when it equals 0 (i.e. is false).
        sootCond = soot.jimple.Jimple.v().newEqExpr(sootCond, soot.jimple.IntConstant.v(0));
    }
    else {
        // Relational condition: invert so the branch target becomes the
        // alternative, then normalize double/float/long comparisons.
        sootCond = reverseCondition((soot.jimple.ConditionExpr)sootCond);
        sootCond = handleDFLCond((soot.jimple.ConditionExpr)sootCond);
    }
    // noop1 marks the start of the alternative branch.
    soot.jimple.Stmt noop1 = soot.jimple.Jimple.v().newNopStmt();
    if (needIf){
        soot.jimple.IfStmt ifStmt = soot.jimple.Jimple.v().newIfStmt(sootCond, noop1);
        body.getUnits().add(ifStmt);
        Util.addLnPosTags(ifStmt, condExpr.position());
        Util.addLnPosTags(ifStmt.getConditionBox(), condition.position());
    }
    // NOTE(review): the result local is typed from the alternative branch,
    // yet the alternative is null-checked further down — confirm whether a
    // null alternative can actually reach this point.
    soot.Local retLocal = generateLocal(condExpr.alternative().type());
    // handle consequence
    polyglot.ast.Expr consequence = condExpr.consequent();
    soot.Value conseqVal = createExpr(consequence);
    soot.jimple.AssignStmt conseqAssignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, conseqVal);
    body.getUnits().add(conseqAssignStmt);
    Util.addLnPosTags(conseqAssignStmt, condExpr.position());
    Util.addLnPosTags(conseqAssignStmt.getRightOpBox(), consequence.position());
    // After the consequent runs, jump over the alternative to noop2.
    soot.jimple.Stmt noop2 = soot.jimple.Jimple.v().newNopStmt();
    soot.jimple.Stmt goto1 = soot.jimple.Jimple.v().newGotoStmt(noop2);
    body.getUnits().add(goto1);
    // handle alternative
    body.getUnits().add(noop1);
    polyglot.ast.Expr alternative = condExpr.alternative();
    if (alternative != null){
        soot.Value altVal = createExpr(alternative);
        soot.jimple.AssignStmt altAssignStmt = soot.jimple.Jimple.v().newAssignStmt(retLocal, altVal);
        body.getUnits().add(altAssignStmt);
        Util.addLnPosTags(altAssignStmt, condExpr.position());
        Util.addLnPosTags(altAssignStmt, alternative.position());
        Util.addLnPosTags(altAssignStmt.getRightOpBox(), alternative.position());
    }
    // Join point for both branches.
    body.getUnits().add(noop2);
    return retLocal;
}
/**
 * Utility methods
 */
/**
 * Returns whether the expression is a literal or a local variable reference,
 * i.e. a value that can be used directly without an intermediate local.
 *
 * @param exp the polyglot expression to classify
 * @return true for {@code polyglot.ast.Lit} or {@code polyglot.ast.Local}
 */
private boolean isLitOrLocal(polyglot.ast.Expr exp) {
    // Collapsed the original if/if-else chain into one boolean expression.
    return exp instanceof polyglot.ast.Lit || exp instanceof polyglot.ast.Local;
}
/**
 * Extra Local Variables Generation.
 *
 * Allocates a fresh Jimple local whose type is the Soot translation of the
 * given polyglot type.
 */
private soot.Local generateLocal(polyglot.types.Type polyglotType) {
    // Translate then delegate to the local generator in one step.
    return lg.generateLocal(Util.getSootType(polyglotType));
}
}
|
package test;
import java.net.Inet4Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.sun.org.apache.xerces.internal.dom.AttributeMap;
import pl.edu.mimuw.cloudatlas.model.Attribute;
import pl.edu.mimuw.cloudatlas.model.AttributesMap;
import pl.edu.mimuw.cloudatlas.model.PathName;
import pl.edu.mimuw.cloudatlas.model.Type;
import pl.edu.mimuw.cloudatlas.model.TypePrimitive;
import pl.edu.mimuw.cloudatlas.model.Value;
import pl.edu.mimuw.cloudatlas.model.ValueBoolean;
import pl.edu.mimuw.cloudatlas.model.ValueContact;
import pl.edu.mimuw.cloudatlas.model.ValueDouble;
import pl.edu.mimuw.cloudatlas.model.ValueDuration;
import pl.edu.mimuw.cloudatlas.model.ValueInt;
import pl.edu.mimuw.cloudatlas.model.ValueList;
import pl.edu.mimuw.cloudatlas.model.ValueNull;
import pl.edu.mimuw.cloudatlas.model.ValueSet;
import pl.edu.mimuw.cloudatlas.model.ValueString;
import pl.edu.mimuw.cloudatlas.model.ValueTime;
import pl.edu.mimuw.cloudatlas.serialization.Serializator;
import pl.edu.mimuw.cloudatlas.serialization.SerializatorAtributeMap;
import sun.net.InetAddressCachePolicy;
/**
 * Serialization round-trip fixtures for {@link AttributesMap}: an empty map
 * plus one single-entry map per supported primitive value type.
 */
public class SerializationAttributesMapTest extends SerializatorTest<AttributesMap> {

    /**
     * Wraps a single (name, value) attribute in a fresh AttributesMap and
     * appends it to the given list.
     */
    public void addToList(List<AttributesMap> list, String s, Value value){
        AttributesMap singleEntryMap = new AttributesMap();
        singleEntryMap.add(new Attribute(s), value);
        list.add(singleEntryMap);
    }

    /**
     * Builds the serialization fixtures.
     *
     * Note: the ValueContact, ValueList and ValueSet cases are currently
     * disabled — presumably pending serializer support; confirm before
     * re-enabling.
     */
    @Override
    public List<AttributesMap> getObjects() {
        List<AttributesMap> fixtures = new ArrayList<AttributesMap>();
        // Empty case:
        fixtures.add(new AttributesMap());
        addToList(fixtures, "attribute1", new ValueString("value1"));
        addToList(fixtures, "attribute2", new ValueBoolean(true));
        addToList(fixtures, "attributeDouble", new ValueDouble(4.4));
        addToList(fixtures, "attributeDuration", new ValueDuration(5L));
        addToList(fixtures, "attributeInt", new ValueInt(14L));
        addToList(fixtures, "attributeNull", ValueNull.getInstance());
        addToList(fixtures, "attributeTime", new ValueTime(3L));
        return fixtures;
    }

    /** @return the serializer under test */
    @Override
    public Serializator<AttributesMap> getSerializator() {
        return new SerializatorAtributeMap();
    }
}
|
package guitests;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
/**
 * GUI test for the "clear" command: verifies clearing a non-empty list,
 * that other commands still work afterwards, and that clearing an
 * already-empty list succeeds.
 */
public class ClearCommandTest extends TaskManagerGuiTest {
    @Test
    public void clear() {
        //verify a non-empty list can be cleared
        //TO BE UPDATED assertTrue(taskListPanel.isListMatching(td.getTypicalTasks()));
        assertClearCommandSuccess();
        //verify other commands can work after a clear command
        commandBox.runCommand(td.gym.getAddCommand());
        //TODO: remove nullpointerexception
        //assertTrue(taskListPanel.isListMatching(td.gym));
        commandBox.runCommand("delete 1");
        assertListSize(0);
        //verify clear command works when the list is empty
        assertClearCommandSuccess();
    }

    /**
     * Runs "clear" and asserts the list is emptied and the expected
     * result message is shown.
     */
    private void assertClearCommandSuccess() {
        commandBox.runCommand("clear");
        assertListSize(0);
        assertResultMessage("Dueue has been cleared!");
    }
}
|
package hex.drf;
import hex.drf.DRF.DRFModel;
import org.junit.*;
import water.*;
import water.fvec.Frame;
import water.fvec.Vec;
/**
 * Tests DRF model adaptation: a model trained on one frame is adapted to a
 * test frame whose enum columns have fewer, more, or identical domain values,
 * and the adapted vectors are compared against expected-mapping columns
 * embedded in the test data itself.
 */
public class DRFModelAdaptTest extends TestUtil {
    // Test helper: selects the response vector from a frame and reports how
    // many columns are expected to require adaptation (default: all).
    private abstract class PrepData { abstract Vec prep(Frame fr); int needAdaptation(Frame fr) { return fr.numCols(); };}

    // Ensure the test cloud has at least one node before any test runs.
    @BeforeClass public static void stall() { stall_till_cloudsize(1); }

    /**
     * The scenario:
     *  - test data contains an input column which contains less enum values than the same column in train data.
     * In this case we should provide correct values mapping:
     *  A - 0
     *  B - 1    B - 0                                 B - 1
     *  C - 2    D - 1   mapping should remap it into: D - 3
     *  D - 3
     */
    @Test public void testModelAdapt() {
        testModelAdaptation(
                "./smalldata/test/classifier/coldom_train.csv",
                "./smalldata/test/classifier/coldom_test.csv",
                new PrepData() { @Override Vec prep(Frame fr) { return fr.vecs()[fr.numCols()-1]; } });
    }

    /**
     * The scenario:
     *  - test data contains an input column which contains more enum values than the same column in train data.
     *  A - 0
     *  B - 1    B - 0                                 B - 1
     *  C - 2    X - 1   mapping should remap it into: X - NA
     *  D - 3
     */
    @Test public void testModelAdapt2() {
        testModelAdaptation(
                "./smalldata/test/classifier/coldom_train.csv",
                "./smalldata/test/classifier/coldom_test2.csv",
                new PrepData() { @Override Vec prep(Frame fr) { return fr.vecs()[fr.numCols()-1]; } });
    }

    // Scenario where no column needs adaptation (needAdaptation returns 0);
    // the response column is looked up by name ("R") instead of position.
    @Test public void testModelAdapt3() {
        testModelAdaptation(
                "./smalldata/test/classifier/coldom_train_2.csv",
                "./smalldata/test/classifier/coldom_test_2.csv",
                new PrepData() { @Override Vec prep(Frame fr) { return fr.vecs()[fr.find("R")]; }; @Override int needAdaptation(Frame fr) { return 0;} });
    }

    /**
     * Trains a one-tree DRF model on {@code train}, adapts {@code test} to it,
     * and checks every adapted input vector cell-by-cell against the expected
     * mapping columns that the test CSV carries alongside the inputs.
     *
     * Test-data layout assumption: for each of the N input columns there is a
     * matching expected column, plus the response — 2*N+1 columns total.
     */
    void testModelAdaptation(String train, String test, PrepData dprep) {
        DRFModel model = null;
        Frame frTest = null;
        Frame frTrain = null;
        Key trainKey = Key.make("train.hex");
        Key testKey = Key.make("test.hex");
        Frame[] frAdapted = null;
        try {
            // Prepare a simple model
            frTrain = parseFrame(trainKey, train);
            model = runDRF(frTrain,dprep);
            // Load test dataset - test data contains input columns matching train data,
            // BUT each input requires adaptation. Moreover, test data contains additional columns
            // containing correct value mapping.
            frTest = parseFrame(testKey, test);
            Assert.assertEquals("TEST CONF ERROR: The test dataset should contain 2*<number of input columns>+1!", 2*(frTrain.numCols()-1)+1, frTest.numCols());
            // Adapt test dataset
            frAdapted = model.adapt(frTest, true); // do not perform translation to enums
            Assert.assertEquals("Adapt method should return two frames", 2, frAdapted.length);
            Assert.assertEquals("Test expects that all columns in test dataset has to be adapted", dprep.needAdaptation(frTrain), frAdapted[1].numCols());
            // Compare vectors
            Frame adaptedFrame = frAdapted[0];
            //System.err.println(frTest.toStringAll());
            //System.err.println(adaptedFrame.toStringAll());
            // av indexes the adapted input column; ev indexes the matching
            // expected-mapping column which sits numCols() further right.
            for (int av=0; av<frTrain.numCols()-1; av++) {
                int ev = av + frTrain.numCols();
                Vec actV = adaptedFrame.vecs()[av];
                Vec expV = frTest.vecs()[ev];
                Assert.assertEquals("Different number of rows in test vectors", expV.length(), actV.length());
                for (long r=0; r<expV.length(); r++) {
                    if (expV.isNA(r)) Assert.assertTrue("Badly adapted vector - expected NA! Col: " + av + ", row: " + r, actV.isNA(r));
                    else {
                        Assert.assertTrue("Badly adapted vector - expected value but get NA! Col: " + av + ", row: " + r, !actV.isNA(r));
                        Assert.assertEquals("Badly adapted vector - wrong values! Col: " + av + ", row: " + r, expV.at8(r), actV.at8(r));
                    }
                }
            }
        } finally {
            // Test cleanup
            if (model!=null) UKV.remove(model._selfKey);
            if (frTrain!=null) frTrain.remove();
            UKV.remove(trainKey);
            if (frTest!=null) frTest.remove();
            UKV.remove(testKey);
            // Remove adapted vectors which were saved into KV-store, rest of vectors are remove by frTest.remove()
            if (frAdapted!=null) frAdapted[1].remove();
        }
    }

    /**
     * Trains a minimal (single-tree) DRF model on the given frame, using the
     * PrepData callback to pick the response vector.
     */
    private DRFModel runDRF(Frame data, PrepData dprep) {
        DRF drf = new DRF();
        drf.source = data;
        drf.response = dprep.prep(data);
        drf.ntrees = 1;
        drf.invoke();
        return UKV.get(drf.dest());
    }
}
|
package org.zstack.network.service.eip;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;
import org.zstack.core.Platform;
import org.zstack.core.cloudbus.CloudBus;
import org.zstack.core.cloudbus.MessageSafe;
import org.zstack.core.componentloader.PluginRegistry;
import org.zstack.core.db.*;
import org.zstack.core.db.SimpleQuery.Op;
import org.zstack.core.errorcode.ErrorFacade;
import org.zstack.core.workflow.FlowChainBuilder;
import org.zstack.core.workflow.ShareFlow;
import org.zstack.header.AbstractService;
import org.zstack.header.core.Completion;
import org.zstack.header.core.workflow.*;
import org.zstack.header.errorcode.ErrorCode;
import org.zstack.header.errorcode.OperationFailureException;
import org.zstack.header.exception.CloudRuntimeException;
import org.zstack.header.identity.*;
import org.zstack.header.identity.Quota.QuotaOperator;
import org.zstack.header.identity.Quota.QuotaPair;
import org.zstack.header.message.APIMessage;
import org.zstack.header.message.Message;
import org.zstack.header.message.NeedQuotaCheckMessage;
import org.zstack.header.network.l3.*;
import org.zstack.header.network.service.NetworkServiceProviderType;
import org.zstack.header.query.AddExpandedQueryExtensionPoint;
import org.zstack.header.query.ExpandedQueryAliasStruct;
import org.zstack.header.query.ExpandedQueryStruct;
import org.zstack.header.quota.QuotaConstant;
import org.zstack.header.vm.*;
import org.zstack.identity.AccountManager;
import org.zstack.identity.QuotaUtil;
import org.zstack.network.service.NetworkServiceManager;
import org.zstack.network.service.vip.*;
import org.zstack.tag.TagManager;
import org.zstack.utils.CollectionUtils;
import org.zstack.utils.Utils;
import org.zstack.utils.function.Function;
import org.zstack.utils.logging.CLogger;
import static org.zstack.core.Platform.operr;
import javax.persistence.TypedQuery;
import java.util.*;
import java.util.stream.Collectors;
import static org.zstack.utils.CollectionDSL.list;
public class EipManagerImpl extends AbstractService implements EipManager, VipReleaseExtensionPoint,
AddExpandedQueryExtensionPoint, ReportQuotaExtensionPoint, VmPreAttachL3NetworkExtensionPoint,
VmIpChangedExtensionPoint, ResourceOwnerAfterChangeExtensionPoint, VipGetServiceReferencePoint {
private static final CLogger logger = Utils.getLogger(EipManagerImpl.class);
@Autowired
private CloudBus bus;
@Autowired
private DatabaseFacade dbf;
@Autowired
private PluginRegistry pluginRgty;
@Autowired
private NetworkServiceManager nwServiceMgr;
@Autowired
private AccountManager acntMgr;
@Autowired
private VipManager vipMgr;
@Autowired
private TagManager tagMgr;
@Autowired
private ErrorFacade errf;
private Map<String, EipBackend> backends = new HashMap<>();
/**
 * Entry point for all messages delivered to this service: API messages go
 * to the API dispatcher, everything else to the local dispatcher.
 */
@Override
@MessageSafe
public void handleMessage(Message msg) {
    if (!(msg instanceof APIMessage)) {
        handleLocalMessage(msg);
        return;
    }
    handleApiMessage((APIMessage) msg);
}
/**
 * Dispatches internal (non-API) messages; only EipDeletionMsg is handled,
 * anything else is reported as unknown on the bus.
 */
private void handleLocalMessage(Message msg) {
    if (!(msg instanceof EipDeletionMsg)) {
        bus.dealWithUnknownMessage(msg);
        return;
    }
    handle((EipDeletionMsg) msg);
}
/**
 * Deletes the EIP named by the message and replies with the outcome;
 * on failure the error code is carried back in the reply.
 */
private void handle(EipDeletionMsg msg) {
    EipDeletionReply reply = new EipDeletionReply();
    deleteEip(msg.getEipUuid(), new Completion(msg) {
        @Override
        public void success() {
            bus.reply(msg, reply);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            reply.setError(errorCode);
            bus.reply(msg, reply);
        }
    });
}
/**
 * Routes each EIP API message type to its dedicated handler; unrecognized
 * messages are reported as unknown on the bus.
 */
private void handleApiMessage(APIMessage msg) {
    if (msg instanceof APICreateEipMsg) {
        handle((APICreateEipMsg) msg);
    } else if (msg instanceof APIDeleteEipMsg) {
        handle((APIDeleteEipMsg) msg);
    } else if (msg instanceof APIAttachEipMsg) {
        handle((APIAttachEipMsg) msg);
    } else if (msg instanceof APIDetachEipMsg) {
        handle((APIDetachEipMsg) msg);
    } else if (msg instanceof APIChangeEipStateMsg) {
        handle((APIChangeEipStateMsg) msg);
    } else if (msg instanceof APIGetEipAttachableVmNicsMsg) {
        handle((APIGetEipAttachableVmNicsMsg) msg);
    } else if (msg instanceof APIUpdateEipMsg) {
        handle((APIUpdateEipMsg) msg);
    } else {
        // No handler registered for this API message type.
        bus.dealWithUnknownMessage(msg);
    }
}
/**
 * Updates the name and/or description of an EIP (only fields present in the
 * message are touched) and publishes the resulting inventory.
 */
private void handle(APIUpdateEipMsg msg) {
    EipVO eipVO = dbf.findByUuid(msg.getUuid(), EipVO.class);
    boolean dirty = false;
    if (msg.getName() != null) {
        eipVO.setName(msg.getName());
        dirty = true;
    }
    if (msg.getDescription() != null) {
        eipVO.setDescription(msg.getDescription());
        dirty = true;
    }
    // Only hit the database when something actually changed.
    if (dirty) {
        eipVO = dbf.updateAndRefresh(eipVO);
    }
    APIUpdateEipEvent evt = new APIUpdateEipEvent(msg.getId());
    evt.setInventory(EipInventory.valueOf(eipVO));
    bus.publish(evt);
}
/**
 * Finds the VM nics to which the EIP of the given VIP can be attached.
 *
 * Candidate L3 networks are the non-system networks in the VIP's zone that
 * provide the EIP network service — restricted to the VIP's service provider
 * type when one is set — excluding the VIP's own L3 network. When the VIP
 * already has peer L3 networks, candidates are further restricted to L3
 * networks attached to the same virtual router (identified by a nic with
 * metadata on a peer network). Nics without an IP (recovered VMs) are
 * excluded.
 *
 * @param vip the VIP backing the EIP
 * @return inventories of attachable user-VM nics; empty list when none
 */
@Transactional(readOnly = true)
private List<VmNicInventory> getAttachableVmNicForEip(VipInventory vip) {
    String providerType = vip.getServiceProvider();
    List<String> peerL3NetworkUuids = vip.getPeerL3NetworkUuids();
    String zoneUuid = Q.New(L3NetworkVO.class)
            .select(L3NetworkVO_.zoneUuid)
            .eq(L3NetworkVO_.uuid, vip.getL3NetworkUuid())
            .findValue();
    List<String> l3Uuids;
    if (providerType != null) {
        // Provider type known: also require the L3's EIP service to be
        // backed by that provider.
        l3Uuids = SQL.New("select l3.uuid" +
                " from L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref, NetworkServiceProviderVO np" +
                " where l3.system = :system" +
                " and l3.uuid != :vipL3NetworkUuid" +
                " and l3.uuid = ref.l3NetworkUuid" +
                " and ref.networkServiceType = :nsType" +
                " and l3.zoneUuid = :zoneUuid" +
                " and np.uuid = ref.networkServiceProviderUuid" +
                " and np.type = :npType")
                .param("system", false)
                .param("zoneUuid", zoneUuid)
                .param("nsType", EipConstant.EIP_NETWORK_SERVICE_TYPE)
                .param("npType", providerType)
                .param("vipL3NetworkUuid", vip.getL3NetworkUuid())
                .list();
    } else {
        l3Uuids = SQL.New("select l3.uuid" +
                " from L3NetworkVO l3, NetworkServiceL3NetworkRefVO ref" +
                " where l3.system = :system" +
                " and l3.uuid != :vipL3NetworkUuid" +
                " and l3.uuid = ref.l3NetworkUuid" +
                " and ref.networkServiceType = :nsType" +
                " and l3.zoneUuid = :zoneUuid")
                .param("system", false)
                .param("zoneUuid", zoneUuid)
                .param("nsType", EipConstant.EIP_NETWORK_SERVICE_TYPE)
                .param("vipL3NetworkUuid", vip.getL3NetworkUuid())
                .list();
    }
    if (peerL3NetworkUuids != null) {
        // A nic with metadata on a peer L3 network identifies the virtual
        // router already serving this VIP; only its attached L3s remain.
        VmNicVO rnic = Q.New(VmNicVO.class).in(VmNicVO_.l3NetworkUuid, peerL3NetworkUuids)
                .notNull(VmNicVO_.metaData).limit(1).find();
        if (rnic != null) {
            List<String> vrAttachedL3Uuids = Q.New(VmNicVO.class)
                    .select(VmNicVO_.l3NetworkUuid)
                    .eq(VmNicVO_.vmInstanceUuid, rnic.getVmInstanceUuid())
                    .listValues();
            // Fix: use a parameterized Set<String> instead of a raw Set,
            // and a method reference instead of the identity lambda.
            Set<String> l3UuidSet = new HashSet<>(vrAttachedL3Uuids);
            l3Uuids = l3Uuids.stream().filter(l3UuidSet::contains).collect(Collectors.toList());
        }
    }
    if (l3Uuids.isEmpty()) {
        return new ArrayList<>();
    }
    List<VmNicVO> nics = SQL.New("select nic" +
            " from VmNicVO nic, VmInstanceVO vm" +
            " where nic.l3NetworkUuid in (:l3Uuids)" +
            " and nic.vmInstanceUuid = vm.uuid" +
            " and vm.type = :vmType and vm.state in (:vmStates) " +
            // IP = null means the VM is just recovered without any IP allocated
            " and nic.ip is not null")
            .param("l3Uuids", l3Uuids)
            .param("vmType", VmInstanceConstant.USER_VM_TYPE)
            .param("vmStates", EipConstant.attachableVmStates)
            .list();
    return VmNicInventory.valueOf(nics);
}
/**
 * Runs every registered filter extension over the candidate nic list,
 * feeding each extension the output of the previous one.
 */
private List<VmNicInventory> filterVmNicsForEipInVirtualRouterExtensionPoint(VipInventory vip, List<VmNicInventory> vmNics) {
    if (vmNics.isEmpty()) {
        return vmNics;
    }
    // Work on a copy so the caller's list is never mutated by extensions.
    List<VmNicInventory> filtered = new ArrayList<>(vmNics);
    for (FilterVmNicsForEipInVirtualRouterExtensionPoint ext : pluginRgty.getExtensionList(FilterVmNicsForEipInVirtualRouterExtensionPoint.class)) {
        filtered = ext.filterVmNicsForEipInVirtualRouter(vip, filtered);
    }
    return filtered;
}
/**
 * Resolves the VIP referenced by the message — directly via its uuid, or
 * through the EIP when an eip uuid is given — then returns the nics the EIP
 * can be attached to, after extension-point filtering.
 */
@Transactional(readOnly = true)
private List<VmNicInventory> getEipAttachableVmNics(APIGetEipAttachableVmNicsMsg msg){
    VipVO vipvo;
    if (msg.getEipUuid() == null) {
        vipvo = Q.New(VipVO.class).eq(VipVO_.uuid, msg.getVipUuid()).find();
    } else {
        vipvo = SQL.New("select vip" +
                " from VipVO vip, EipVO eip" +
                " where eip.uuid = :eipUuid" +
                " and eip.vipUuid = vip.uuid")
                .param("eipUuid", msg.getEipUuid())
                .find();
    }
    VipInventory vipInv = VipInventory.valueOf(vipvo);
    List<VmNicInventory> candidates = getAttachableVmNicForEip(vipInv);
    if (candidates != null && !candidates.isEmpty()) {
        logger.debug(String.format("get eip[uuid:%s] attachable vm nics[%s] before filter extension point",
                msg.getEipUuid(), candidates.stream().map(VmNicInventory::getUuid).collect(Collectors.toList())));
    }
    candidates = filterVmNicsForEipInVirtualRouterExtensionPoint(vipInv, candidates);
    if (candidates != null && !candidates.isEmpty()) {
        logger.debug(String.format("get eip[uuid:%s] attachable vm nics[%s] after filter extension point",
                msg.getEipUuid(), candidates.stream().map(VmNicInventory::getUuid).collect(Collectors.toList())));
    }
    return candidates;
}
/**
 * Replies with the nics the EIP can be attached to; an EIP that is already
 * bound to a nic has no attachable candidates.
 */
private void handle(APIGetEipAttachableVmNicsMsg msg) {
    APIGetEipAttachableVmNicsReply reply = new APIGetEipAttachableVmNicsReply();
    boolean alreadyAttached = Q.New(EipVO.class)
            .eq(EipVO_.uuid, msg.getEipUuid())
            .notNull(EipVO_.vmNicUuid)
            .isExists();
    if (alreadyAttached) {
        reply.setInventories(new ArrayList<>());
    } else {
        reply.setInventories(getEipAttachableVmNics(msg));
    }
    bus.reply(msg, reply);
}
/**
 * Applies a state-change event (e.g. enable/disable) to the EIP via the
 * state machine's nextState transition and publishes the updated inventory.
 */
private void handle(APIChangeEipStateMsg msg) {
    EipVO eipVO = dbf.findByUuid(msg.getUuid(), EipVO.class);
    EipStateEvent stateEvent = EipStateEvent.valueOf(msg.getStateEvent());
    eipVO.setState(eipVO.getState().nextState(stateEvent));
    eipVO = dbf.updateAndRefresh(eipVO);
    APIChangeEipStateEvent evt = new APIChangeEipStateEvent(msg.getId());
    evt.setInventory(EipInventory.valueOf(eipVO));
    bus.publish(evt);
}
/**
 * Detaches an EIP from its current VM nic through the network service
 * provider of the nic's L3 network, then publishes the detach event with the
 * refreshed inventory (or the error on failure).
 */
private void handle(APIDetachEipMsg msg) {
    final APIDetachEipEvent evt = new APIDetachEipEvent(msg.getId());
    final EipVO vo = dbf.findByUuid(msg.getUuid(), EipVO.class);
    VmNicVO nicvo = dbf.findByUuid(vo.getVmNicUuid(), VmNicVO.class);
    VmNicInventory nicInventory = VmNicInventory.valueOf(nicvo);
    VipVO vipvo = dbf.findByUuid(vo.getVipUuid(), VipVO.class);
    VipInventory vipInventory = VipInventory.valueOf(vipvo);
    // The provider is chosen by the nic's L3 network, not the VIP's network.
    NetworkServiceProviderType providerType = nwServiceMgr.
            getTypeOfNetworkServiceProviderForService(nicInventory.getL3NetworkUuid(), EipConstant.EIP_TYPE);
    EipStruct struct = new EipStruct();
    struct.setVip(vipInventory);
    struct.setNic(nicInventory);
    struct.setEip(EipInventory.valueOf(vo));
    struct.setSnatInboundTraffic(EipGlobalConfig.SNAT_INBOUND_TRAFFIC.value(Boolean.class));
    detachEipAndUpdateDb(struct, providerType.toString(), new Completion(msg) {
        @Override
        public void success() {
            // Reload to pick up DB changes made by detachEipAndUpdateDb
            // (presumably clearing the nic binding — confirm in its impl).
            evt.setInventory(EipInventory.valueOf(dbf.reload(vo)));
            bus.publish(evt);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            evt.setError(errorCode);
            bus.publish(evt);
        }
    });
}
/**
 * Attaches an EIP to a VM nic. If the VM is in a state that needs no backend
 * work, only the DB binding is updated; otherwise the attachment is applied
 * through the nic L3 network's service provider first, and the DB is updated
 * on success.
 */
private void handle(final APIAttachEipMsg msg) {
    final APIAttachEipEvent evt = new APIAttachEipEvent(msg.getId());
    final EipVO vo = dbf.findByUuid(msg.getEipUuid(), EipVO.class);
    VmNicVO nicvo = dbf.findByUuid(msg.getVmNicUuid(), VmNicVO.class);
    final VmNicInventory nicInventory = VmNicInventory.valueOf(nicvo);
    VipVO vipvo = dbf.findByUuid(vo.getVipUuid(), VipVO.class);
    VipInventory vipInventory = VipInventory.valueOf(vipvo);
    SimpleQuery<VmInstanceVO> q = dbf.createQuery(VmInstanceVO.class);
    q.select(VmInstanceVO_.state);
    q.add(VmInstanceVO_.uuid, SimpleQuery.Op.EQ, nicvo.getVmInstanceUuid());
    VmInstanceState state = q.findValue();
    if (EipConstant.noNeedApplyOnBackendVmStates.contains(state)) {
        // VM state requires no backend call: just record the binding.
        vo.setVmNicUuid(nicInventory.getUuid());
        vo.setGuestIp(nicvo.getIp());
        EipVO evo = dbf.updateAndRefresh(vo);
        evt.setInventory(EipInventory.valueOf(evo));
        bus.publish(evt);
        return;
    }
    NetworkServiceProviderType providerType = nwServiceMgr.getTypeOfNetworkServiceProviderForService(
            nicInventory.getL3NetworkUuid(), EipConstant.EIP_TYPE);
    EipStruct struct = new EipStruct();
    struct.setNic(nicInventory);
    struct.setVip(vipInventory);
    struct.setEip(EipInventory.valueOf(vo));
    struct.setSnatInboundTraffic(EipGlobalConfig.SNAT_INBOUND_TRAFFIC.value(Boolean.class));
    attachEip(struct, providerType.toString(), new Completion(msg) {
        @Override
        public void success() {
            // Persist the nic binding only after the backend attach succeeds.
            vo.setVmNicUuid(nicInventory.getUuid());
            vo.setGuestIp(nicInventory.getIp());
            EipVO evo = dbf.updateAndRefresh(vo);
            evt.setInventory(EipInventory.valueOf(evo));
            bus.publish(evt);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            evt.setError(errorCode);
            bus.publish(evt);
        }
    });
}
/**
 * Deletes an EIP. An unattached EIP only needs its VIP usage released and
 * the record removed; an attached EIP additionally has to be revoked on the
 * backend first. The attached path runs as a flow chain: revoke on backend,
 * release VIP, then remove the DB record on success.
 */
private void deleteEip(String eipUuid, Completion completion) {
    final EipVO vo = dbf.findByUuid(eipUuid, EipVO.class);
    VipVO vipvo = dbf.findByUuid(vo.getVipUuid(), VipVO.class);
    VipInventory vipInventory = VipInventory.valueOf(vipvo);
    if (vo.getVmNicUuid() == null) {
        // Not attached to any nic: release the VIP's EIP usage and remove.
        ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
        struct.setUseFor(EipConstant.EIP_NETWORK_SERVICE_TYPE);
        Vip vip = new Vip(vo.getVipUuid());
        vip.setStruct(struct);
        vip.release(new Completion(completion) {
            @Override
            public void success() {
                dbf.remove(vo);
                completion.success();
            }
            @Override
            public void fail(ErrorCode errorCode) {
                completion.fail(errorCode);
            }
        });
        return;
    }
    VmNicVO nicvo = dbf.findByUuid(vo.getVmNicUuid(), VmNicVO.class);
    VmNicInventory nicInventory = VmNicInventory.valueOf(nicvo);
    EipStruct struct = new EipStruct();
    struct.setNic(nicInventory);
    struct.setVip(vipInventory);
    struct.setEip(EipInventory.valueOf(vo));
    struct.setSnatInboundTraffic(EipGlobalConfig.SNAT_INBOUND_TRAFFIC.value(Boolean.class));
    NetworkServiceProviderType providerType = nwServiceMgr.
            getTypeOfNetworkServiceProviderForService(nicInventory.getL3NetworkUuid(), EipConstant.EIP_TYPE);
    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("delete-eip-vmNic-%s-vip-%s", nicvo.getUuid(), vipvo.getUuid()));
    chain.then(new ShareFlow() {
        @Override
        public void setup() {
            flow(new NoRollbackFlow() {
                String __name__ = "delete-eip-from-backend";
                @Override
                public void run(FlowTrigger trigger, Map data) {
                    EipBackend bkd = getEipBackend(providerType.toString());
                    bkd.revokeEip(struct, new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }
                        @Override
                        public void fail(ErrorCode errorCode) {
                            //TODO: add GC instead of failing the API
                            logger.warn(String.format("failed to detach eip[uuid:%s, ip:%s, vm nic uuid:%s] on service provider[%s], service provider will garbage collect. %s",
                                    struct.getEip().getUuid(), struct.getVip().getIp(), struct.getNic().getUuid(), providerType, errorCode));
                            trigger.fail(errorCode);
                        }
                    });
                }
            });
            flow(new NoRollbackFlow() {
                String __name__ = "release-vip";
                @Override
                public void run(FlowTrigger trigger, Map data) {
                    // Drop the EIP usage from the VIP once the backend revoke
                    // has completed.
                    ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
                    struct.setUseFor(EipConstant.EIP_NETWORK_SERVICE_TYPE);
                    Vip vip = new Vip(vipInventory.getUuid());
                    vip.setStruct(struct);
                    vip.release(new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }
                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
            });
            done(new FlowDoneHandler(completion) {
                @Override
                public void handle(Map data) {
                    // Remove the DB record only after both flows succeeded.
                    dbf.remove(vo);
                    completion.success();
                }
            });
            error(new FlowErrorHandler(completion) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    completion.fail(errCode);
                }
            });
        }
    }).start();
}
/**
 * Deletes the EIP named by the API message and publishes the delete event;
 * on failure the error code is carried in the event.
 */
private void handle(APIDeleteEipMsg msg) {
    final APIDeleteEipEvent evt = new APIDeleteEipEvent(msg.getId());
    deleteEip(msg.getEipUuid(), new Completion(msg) {
        @Override
        public void success() {
            bus.publish(evt);
        }
        @Override
        public void fail(ErrorCode errorCode) {
            evt.setError(errorCode);
            bus.publish(evt);
        }
    });
}
/**
 * Creates an EIP on a VIP. Three paths:
 *  1. No VM nic given: persist the record and acquire the VIP for EIP use.
 *  2. Nic given but the VM is not Running: persist the binding and acquire
 *     the VIP; no backend call is made.
 *  3. Nic given and VM Running: run a flow chain that acquires the VIP
 *     (with peer-L3/provider info) and applies the EIP on the backend,
 *     rolling the VIP acquisition back and removing the record on failure.
 *
 * NOTE(review): paths 1 and 2 are near-identical acquire-and-publish
 * blocks — candidates for extraction into a shared helper.
 */
private void handle(APICreateEipMsg msg) {
    final APICreateEipEvent evt = new APICreateEipEvent(msg.getId());
    EipVO vo = new EipVO();
    if (msg.getResourceUuid() != null) {
        vo.setUuid(msg.getResourceUuid());
    } else {
        vo.setUuid(Platform.getUuid());
    }
    vo.setName(msg.getName());
    vo.setDescription(msg.getDescription());
    vo.setVipUuid(msg.getVipUuid());
    SimpleQuery<VipVO> vipq = dbf.createQuery(VipVO.class);
    vipq.select(VipVO_.ip);
    vipq.add(VipVO_.uuid, Op.EQ, msg.getVipUuid());
    String vipIp = vipq.findValue();
    vo.setVipIp(vipIp);
    vo.setVmNicUuid(msg.getVmNicUuid());
    vo.setState(EipState.Enabled);
    EipVO finalVo1 = vo;
    // Persist the record, owner ref, and tags in one SQL batch.
    vo = new SQLBatchWithReturn<EipVO>() {
        @Override
        protected EipVO scripts() {
            persist(finalVo1);
            reload(finalVo1);
            acntMgr.createAccountResourceRef(msg.getSession().getAccountUuid(), finalVo1.getUuid(), EipVO.class);
            tagMgr.createTagsFromAPICreateMessage(msg, finalVo1.getUuid(), EipVO.class.getSimpleName());
            return finalVo1;
        }
    }.execute();
    VipVO vipvo = dbf.findByUuid(msg.getVipUuid(), VipVO.class);
    final VipInventory vipInventory = VipInventory.valueOf(vipvo);
    if (vo.getVmNicUuid() == null) {
        // Path 1: EIP created without a nic; just reserve the VIP for EIP use.
        EipVO finalVo = vo;
        ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
        struct.setUseFor(EipConstant.EIP_NETWORK_SERVICE_TYPE);
        Vip vip = new Vip(vipInventory.getUuid());
        vip.setStruct(struct);
        vip.acquire(new Completion(msg) {
            @Override
            public void success() {
                evt.setInventory(EipInventory.valueOf(finalVo));
                logger.debug(String.format("successfully created eip[uuid:%s, name:%s] on vip[uuid:%s, ip:%s]",
                        finalVo.getUuid(), finalVo.getName(), vipInventory.getUuid(), vipInventory.getIp()));
                bus.publish(evt);
            }
            @Override
            public void fail(ErrorCode errorCode) {
                evt.setError(errorCode);
                bus.publish(evt);
            }
        });
        return;
    }
    VmNicVO nicvo = dbf.findByUuid(msg.getVmNicUuid(), VmNicVO.class);
    vo.setGuestIp(nicvo.getIp());
    vo = dbf.updateAndRefresh(vo);
    final EipInventory retinv = EipInventory.valueOf(vo);
    final VmNicInventory nicInventory = VmNicInventory.valueOf(nicvo);
    SimpleQuery<VmInstanceVO> q = dbf.createQuery(VmInstanceVO.class);
    q.select(VmInstanceVO_.state);
    q.add(VmInstanceVO_.uuid, SimpleQuery.Op.EQ, nicvo.getVmInstanceUuid());
    VmInstanceState state = q.findValue();
    if (state != VmInstanceState.Running) {
        // Path 2: VM not running — no backend apply; reserve the VIP only.
        EipVO finalVo = vo;
        ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
        struct.setUseFor(EipConstant.EIP_NETWORK_SERVICE_TYPE);
        Vip vip = new Vip(vipInventory.getUuid());
        vip.setStruct(struct);
        vip.acquire(new Completion(msg) {
            @Override
            public void success() {
                evt.setInventory(EipInventory.valueOf(finalVo));
                logger.debug(String.format("successfully created eip[uuid:%s, name:%s] on vip[uuid:%s, ip:%s]",
                        finalVo.getUuid(), finalVo.getName(), vipInventory.getUuid(), vipInventory.getIp()));
                bus.publish(evt);
            }
            @Override
            public void fail(ErrorCode errorCode) {
                evt.setError(errorCode);
                bus.publish(evt);
            }
        });
        return;
    }
    // Path 3: VM running — acquire the VIP, then apply on the backend.
    final EipVO fevo = vo;
    EipStruct struct = new EipStruct();
    struct.setEip(EipInventory.valueOf(vo));
    struct.setSnatInboundTraffic(EipGlobalConfig.SNAT_INBOUND_TRAFFIC.value(Boolean.class));
    struct.setNic(nicInventory);
    struct.setVip(vipInventory);
    NetworkServiceProviderType providerType = nwServiceMgr.getTypeOfNetworkServiceProviderForService(nicInventory.getL3NetworkUuid(), EipConstant.EIP_TYPE);
    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("create-eip-vmNic-%s-vip-%s", msg.getVmNicUuid(), msg.getVipUuid()));
    chain.then(new ShareFlow() {
        @Override
        public void setup() {
            flow(new Flow() {
                String __name__ = "prepare-vip";
                // Tracks whether acquire succeeded, so rollback only
                // releases what was actually acquired.
                boolean s = false;
                @Override
                public void run(FlowTrigger trigger, Map data) {
                    ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
                    struct.setUseFor(EipConstant.EIP_NETWORK_SERVICE_TYPE);
                    struct.setPeerL3NetworkUuid(nicInventory.getL3NetworkUuid());
                    struct.setServiceProvider(providerType.toString());
                    Vip vip = new Vip(vipInventory.getUuid());
                    vip.setStruct(struct);
                    vip.acquire(new Completion(trigger) {
                        @Override
                        public void success() {
                            s = true;
                            trigger.next();
                        }
                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
                @Override
                public void rollback(FlowRollback trigger, Map data) {
                    if (!s) {
                        trigger.rollback();
                        return;
                    }
                    ModifyVipAttributesStruct struct = new ModifyVipAttributesStruct();
                    struct.setUseFor(EipConstant.EIP_NETWORK_SERVICE_TYPE);
                    Vip vip = new Vip(vipInventory.getUuid());
                    vip.setStruct(struct);
                    vip.release(new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.rollback();
                        }
                        @Override
                        public void fail(ErrorCode errorCode) {
                            // Rollback proceeds even if the release fails;
                            // the failure is only logged.
                            logger.warn(errorCode.toString());
                            trigger.rollback();
                        }
                    });
                }
            });
            flow(new NoRollbackFlow() {
                String __name__ = "create-eip-on-backend";
                @Override
                public void run(FlowTrigger trigger, Map data) {
                    EipBackend bkd = getEipBackend(providerType.toString());
                    bkd.applyEip(struct, new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }
                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
            });
            done(new FlowDoneHandler(msg) {
                @Override
                public void handle(Map data) {
                    evt.setInventory(retinv);
                    logger.debug(String.format("successfully created eip[uuid:%s, name:%s] on vip[uuid:%s] for vm nic[uuid:%s]",
                            retinv.getUuid(), retinv.getName(), vipInventory.getUuid(), nicInventory.getUuid()));
                    bus.publish(evt);
                }
            });
            error(new FlowErrorHandler(msg) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    evt.setError(errCode);
                    // The EIP record is removed when creation fails.
                    dbf.remove(fevo);
                    logger.debug(String.format("failed to create eip[uuid:%s, name:%s] on vip[uuid:%s] for vm nic[uuid:%s], %s",
                            retinv.getUuid(), retinv.getName(), vipInventory.getUuid(), nicInventory.getUuid(), errCode));
                    bus.publish(evt);
                }
            });
        }
    }).start();
}
/**
 * @return the local service id this manager registers on the cloud bus
 */
@Override
public String getId() {
    String serviceId = bus.makeLocalServiceId(EipConstant.SERVICE_ID);
    return serviceId;
}
/**
 * Indexes every registered {@code EipBackend} plugin by its network service
 * provider type, rejecting duplicate registrations for the same type.
 */
private void populateExtensions() {
    for (EipBackend backend : pluginRgty.getExtensionList(EipBackend.class)) {
        String providerType = backend.getNetworkServiceProviderType();
        EipBackend existing = backends.get(providerType);
        if (existing != null) {
            throw new CloudRuntimeException(String.format("duplicate EipBackend[%s,%s] for type[%s]", existing.getClass().getName(),
                    backend.getClass().getName(), providerType));
        }
        backends.put(providerType, backend);
    }
}
@Override
public boolean start() {
    // Component lifecycle hook: collect EipBackend plugins before serving requests.
    populateExtensions();
    return true;
}
@Override
public boolean stop() {
    // No resources to release on shutdown.
    return true;
}
/**
 * Looks up the {@code EipBackend} registered for the given provider type.
 *
 * @param providerType network service provider type the backend was registered under
 * @return the matching backend, never null
 * @throws CloudRuntimeException if no backend is registered for the type
 */
@Override
public EipBackend getEipBackend(String providerType) {
    EipBackend backend = backends.get(providerType);
    if (backend != null) {
        return backend;
    }
    throw new CloudRuntimeException(String.format("cannot find EipBackend for type[%s]", providerType));
}
/**
 * Tears down an EIP on the backend and optionally detaches it from its vm nic
 * in the database.
 *
 * @param struct       describes the EIP, its vip and the vm nic involved
 * @param providerType network service provider implementing the EIP
 * @param updateDb     when true, clear EipVO.vmNicUuid/guestIp after the backend revoke succeeds
 * @param completion   completion invoked when the flow chain finishes
 */
private void detachEip(final EipStruct struct, final String providerType, final boolean updateDb, final Completion completion) {
    VmNicInventory nic = struct.getNic();
    final EipInventory eip = struct.getEip();
    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("detach-eip-%s-vmNic-%s", eip.getUuid(), nic.getUuid()));
    chain.then(new ShareFlow() {
        @Override
        public void setup() {
            flow(new NoRollbackFlow() {
                String __name__ = "delete-eip-from-backend";

                @Override
                public void run(FlowTrigger trigger, Map data) {
                    EipBackend bkd = getEipBackend(providerType);
                    bkd.revokeEip(struct, new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            //TODO add GC instead of failing the API
                            logger.warn(String.format("failed to detach eip[uuid:%s, ip:%s, vm nic uuid:%s] on service provider[%s], service provider will garbage collect. %s",
                                    struct.getEip().getUuid(), struct.getVip().getIp(), struct.getNic().getUuid(), providerType, errorCode));
                            trigger.fail(errorCode);
                        }
                    });
                }
            });

            if (updateDb) {
                flow(new NoRollbackFlow() {
                    // fixed typo: flow name was "udpate-eip" (name is used in flow logs only)
                    String __name__ = "update-eip";

                    @Override
                    public void run(FlowTrigger trigger, Map data) {
                        // detach the EIP from the nic in the database once the
                        // backend no longer applies it
                        UpdateQuery q = UpdateQuery.New(EipVO.class);
                        q.condAnd(EipVO_.uuid, Op.EQ, eip.getUuid());
                        q.set(EipVO_.vmNicUuid, null);
                        q.set(EipVO_.guestIp, null);
                        q.update();
                        trigger.next();
                    }
                });
            }

            done(new FlowDoneHandler(completion) {
                @Override
                public void handle(Map data) {
                    completion.success();
                }
            });

            error(new FlowErrorHandler(completion) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    completion.fail(errCode);
                }
            });
        }
    }).start();
}
@Override
public void detachEip(EipStruct struct, String providerType, final Completion completion) {
    // Backend-only revoke; the EipVO's vmNicUuid/guestIp columns are left untouched.
    detachEip(struct, providerType, false, completion);
}
@Override
public void detachEipAndUpdateDb(EipStruct struct, String providerType, Completion completion) {
    // Revoke on the backend AND clear the nic binding in the database.
    detachEip(struct, providerType, true, completion);
}
/**
 * Attaches an EIP to a vm nic: first locks the vip for EIP use
 * ("acquire-vip", rolled back by releasing the vip), then asks the
 * provider-specific EipBackend to apply the EIP on the backend.
 *
 * @param struct       describes the EIP, its vip and the target vm nic
 * @param providerType network service provider implementing the EIP
 * @param completion   completion invoked when the flow chain finishes
 */
@Override
public void attachEip(final EipStruct struct, final String providerType, final Completion completion) {
    final EipInventory eip = struct.getEip();
    final VmNicInventory nic = struct.getNic();
    FlowChain chain = FlowChainBuilder.newShareFlowChain();
    chain.setName(String.format("attach-eip-%s-vmNic-%s", eip.getUuid(), nic.getUuid()));
    chain.then(new ShareFlow() {
        @Override
        public void setup() {
            flow(new Flow() {
                // set only after the vip is successfully acquired, so the
                // rollback releases the vip only when this flow actually took it
                boolean s = false;

                String __name__ = "acquire-vip";

                @Override
                public void run(FlowTrigger trigger, Map data) {
                    ModifyVipAttributesStruct vipStruct = new ModifyVipAttributesStruct();
                    vipStruct.setUseFor(EipConstant.EIP_NETWORK_SERVICE_TYPE);
                    vipStruct.setServiceProvider(providerType);
                    vipStruct.setPeerL3NetworkUuid(nic.getL3NetworkUuid());
                    Vip vip = new Vip(struct.getVip().getUuid());
                    vip.setStruct(vipStruct);
                    vip.acquire(new Completion(trigger) {
                        @Override
                        public void success() {
                            s = true;
                            trigger.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }

                @Override
                public void rollback(FlowRollback trigger, Map data) {
                    if (!s) {
                        // vip was never acquired; nothing to undo
                        trigger.rollback();
                        return;
                    }
                    ModifyVipAttributesStruct vipStruct = new ModifyVipAttributesStruct();
                    vipStruct.setUseFor(EipConstant.EIP_NETWORK_SERVICE_TYPE);
                    Vip vip = new Vip(struct.getVip().getUuid());
                    vip.setStruct(vipStruct);
                    vip.release(new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.rollback();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            // rollback must proceed even if the vip release fails; just log it
                            logger.warn(errorCode.toString());
                            trigger.rollback();
                        }
                    });
                }
            });

            flow(new NoRollbackFlow() {
                String __name__ = "create-eip-on-backend";

                @Override
                public void run(FlowTrigger trigger, Map data) {
                    EipBackend bkd = getEipBackend(providerType);
                    bkd.applyEip(struct, new Completion(trigger) {
                        @Override
                        public void success() {
                            trigger.next();
                        }

                        @Override
                        public void fail(ErrorCode errorCode) {
                            trigger.fail(errorCode);
                        }
                    });
                }
            });

            done(new FlowDoneHandler(completion) {
                @Override
                public void handle(Map data) {
                    completion.success();
                }
            });

            error(new FlowErrorHandler(completion) {
                @Override
                public void handle(ErrorCode errCode, Map data) {
                    completion.fail(errCode);
                }
            });
        }
    }).start();
}
@Override
public String getVipUse() {
    // The vip "use" tag this network service registers under.
    return EipConstant.EIP_NETWORK_SERVICE_TYPE;
}
/**
 * Releases the EIP service bound to a vip.
 *
 * If no EIP uses the vip, or the EIP is not attached to any nic, the record
 * (if any) is simply removed. If the owning vm is Stopped, the record is
 * removed without a backend call (presumably the backend holds no state for
 * a stopped vm's EIP — the code relies on that). Otherwise the provider
 * backend is asked to revoke the EIP before the record is removed.
 */
@Override
public void releaseServicesOnVip(VipInventory vip, final Completion completion) {
    SimpleQuery<EipVO> eq = dbf.createQuery(EipVO.class);
    eq.add(EipVO_.vipUuid, SimpleQuery.Op.EQ, vip.getUuid());
    final EipVO vo = eq.find();
    if (vo == null || vo.getVmNicUuid() == null) {
        // no EIP on this vip, or an EIP not attached to any nic:
        // nothing to revoke on any backend
        if (vo != null) {
            dbf.remove(vo);
        }
        completion.success();
        return;
    }
    VmNicVO nicvo = dbf.findByUuid(vo.getVmNicUuid(), VmNicVO.class);
    VmNicInventory nicInventory = VmNicInventory.valueOf(nicvo);
    SimpleQuery<VmInstanceVO> q = dbf.createQuery(VmInstanceVO.class);
    q.select(VmInstanceVO_.state);
    q.add(VmInstanceVO_.uuid, SimpleQuery.Op.EQ, nicvo.getVmInstanceUuid());
    VmInstanceState state = q.findValue();
    if (VmInstanceState.Stopped == state) {
        // stopped vm: drop the record without contacting the backend
        dbf.remove(vo);
        completion.success();
        return;
    }
    NetworkServiceProviderType providerType = nwServiceMgr.getTypeOfNetworkServiceProviderForService(nicInventory.getL3NetworkUuid(), EipConstant.EIP_TYPE);
    EipStruct struct = new EipStruct();
    struct.setVip(vip);
    struct.setNic(nicInventory);
    struct.setEip(EipInventory.valueOf(vo));
    struct.setSnatInboundTraffic(EipGlobalConfig.SNAT_INBOUND_TRAFFIC.value(Boolean.class));
    EipBackend bkd = getEipBackend(providerType.toString());
    bkd.revokeEip(struct, new Completion(completion) {
        @Override
        public void success() {
            // backend no longer applies the EIP; safe to delete the record
            dbf.remove(vo);
            completion.success();
        }

        @Override
        public void fail(ErrorCode errorCode) {
            logger.warn(String.format("failed to detach eip[uuid:%s, ip:%s, vm nic uuid:%s] on service provider[%s], service provider will garbage collect. %s",
                    struct.getEip().getUuid(), struct.getVip().getIp(), struct.getNic().getUuid(), providerType, errorCode));
            completion.fail(errorCode);
        }
    });
}
/**
 * Declares the expanded query field "eip" on both VmNicInventory and
 * VipInventory, each resolving to EipInventory via its uuid foreign key.
 */
@Override
public List<ExpandedQueryStruct> getExpandedQueryStructs() {
    ExpandedQueryStruct byNic = new ExpandedQueryStruct();
    byNic.setInventoryClassToExpand(VmNicInventory.class);
    byNic.setExpandedField("eip");
    byNic.setInventoryClass(EipInventory.class);
    byNic.setForeignKey("uuid");
    byNic.setExpandedInventoryKey("vmNicUuid");

    ExpandedQueryStruct byVip = new ExpandedQueryStruct();
    byVip.setInventoryClassToExpand(VipInventory.class);
    byVip.setExpandedField("eip");
    byVip.setInventoryClass(EipInventory.class);
    byVip.setForeignKey("uuid");
    byVip.setExpandedInventoryKey("vipUuid");

    List<ExpandedQueryStruct> structs = new ArrayList<>();
    structs.add(byNic);
    structs.add(byVip);
    return structs;
}
@Override
public List<ExpandedQueryAliasStruct> getExpandedQueryAliasesStructs() {
    // No query aliases contributed; presumably the framework treats null as
    // "none" — confirm before changing this to an empty list.
    return null;
}
/**
 * Reports the EIP-number quota and its checker to the quota framework.
 * The checker validates EIP creation and vm ownership changes against the
 * per-account EIP limit.
 */
@Override
public List<Quota> reportQuota() {
    QuotaOperator checker = new QuotaOperator() {
        @Override
        public void checkQuota(APIMessage msg, Map<String, QuotaPair> pairs) {
            if (!new QuotaUtil().isAdminAccount(msg.getSession().getAccountUuid())) {
                if (msg instanceof APICreateEipMsg) {
                    check((APICreateEipMsg) msg, pairs);
                } else if (msg instanceof APIChangeResourceOwnerMsg) {
                    check((APIChangeResourceOwnerMsg) msg, pairs);
                }
            } else {
                // admins bypass the create check, but an ownership change is
                // still validated against the target account's quota
                if (msg instanceof APIChangeResourceOwnerMsg) {
                    check((APIChangeResourceOwnerMsg) msg, pairs);
                }
            }
        }

        @Override
        public void checkQuota(NeedQuotaCheckMessage msg, Map<String, QuotaPair> pairs) {
            // no quota checks for internal (non-API) messages
        }

        @Override
        public List<Quota.QuotaUsage> getQuotaUsageByAccount(String accountUuid) {
            Quota.QuotaUsage usage = new Quota.QuotaUsage();
            usage.setName(EipConstant.QUOTA_EIP_NUM);
            usage.setUsed(getUsedEipNum(accountUuid));
            return list(usage);
        }

        // number of EIPs currently owned by the account
        @Transactional(readOnly = true)
        private long getUsedEipNum(String accountUuid) {
            String sql = "select count(eip)" +
                    " from EipVO eip, AccountResourceRefVO ref" +
                    " where ref.resourceUuid = eip.uuid" +
                    " and ref.accountUuid = :auuid" +
                    " and ref.resourceType = :rtype";
            TypedQuery<Long> q = dbf.getEntityManager().createQuery(sql, Long.class);
            q.setParameter("auuid", accountUuid);
            q.setParameter("rtype", EipVO.class.getSimpleName());
            Long usedEipNum = q.getSingleResult();
            usedEipNum = usedEipNum == null ? 0 : usedEipNum;
            return usedEipNum;
        }

        // number of EIPs attached to any nic of the given vm
        @Transactional(readOnly = true)
        private long getVmEipNum(String vmUuid) {
            String sql = "select count(eip)" +
                    " from EipVO eip, VmNicVO vmnic" +
                    " where vmnic.vmInstanceUuid = :vmuuid" +
                    " and vmnic.uuid = eip.vmNicUuid";
            TypedQuery<Long> q = dbf.getEntityManager().createQuery(sql, Long.class);
            q.setParameter("vmuuid", vmUuid);
            Long vmEipNum = q.getSingleResult();
            vmEipNum = vmEipNum == null ? 0 : vmEipNum;
            return vmEipNum;
        }

        // creating one EIP must fit within the creator's quota
        @Transactional(readOnly = true)
        private void check(APICreateEipMsg msg, Map<String, QuotaPair> pairs) {
            String currentAccountUuid = msg.getSession().getAccountUuid();
            String resourceTargetOwnerAccountUuid = msg.getSession().getAccountUuid();
            long eipNumQuota = pairs.get(EipConstant.QUOTA_EIP_NUM).getValue();
            long usedEipNum = getUsedEipNum(msg.getSession().getAccountUuid());
            long askedEipNum = 1;
            QuotaUtil.QuotaCompareInfo quotaCompareInfo;
            quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
            quotaCompareInfo.currentAccountUuid = currentAccountUuid;
            quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
            quotaCompareInfo.quotaName = EipConstant.QUOTA_EIP_NUM;
            quotaCompareInfo.quotaValue = eipNumQuota;
            quotaCompareInfo.currentUsed = usedEipNum;
            quotaCompareInfo.request = askedEipNum;
            new QuotaUtil().CheckQuota(quotaCompareInfo);
        }

        // handing a vm to another account moves its EIPs too; the new owner's
        // quota must absorb them (admins are exempt)
        @Transactional(readOnly = true)
        private void check(APIChangeResourceOwnerMsg msg, Map<String, Quota.QuotaPair> pairs) {
            String currentAccountUuid = msg.getSession().getAccountUuid();
            String resourceTargetOwnerAccountUuid = msg.getAccountUuid();
            if (new QuotaUtil().isAdminAccount(resourceTargetOwnerAccountUuid)) {
                return;
            }
            SimpleQuery<AccountResourceRefVO> q = dbf.createQuery(AccountResourceRefVO.class);
            q.add(AccountResourceRefVO_.resourceUuid, Op.EQ, msg.getResourceUuid());
            AccountResourceRefVO accResRefVO = q.find();
            // only vm instance ownership changes carry EIPs along
            if (accResRefVO.getResourceType().equals(VmInstanceVO.class.getSimpleName())) {
                long eipNumQuota = pairs.get(EipConstant.QUOTA_EIP_NUM).getValue();
                long usedEipNum = getUsedEipNum(resourceTargetOwnerAccountUuid);
                long askedEipNum = getVmEipNum(msg.getResourceUuid());
                QuotaUtil.QuotaCompareInfo quotaCompareInfo;
                quotaCompareInfo = new QuotaUtil.QuotaCompareInfo();
                quotaCompareInfo.currentAccountUuid = currentAccountUuid;
                quotaCompareInfo.resourceTargetOwnerAccountUuid = resourceTargetOwnerAccountUuid;
                quotaCompareInfo.quotaName = EipConstant.QUOTA_EIP_NUM;
                quotaCompareInfo.quotaValue = eipNumQuota;
                quotaCompareInfo.currentUsed = usedEipNum;
                quotaCompareInfo.request = askedEipNum;
                new QuotaUtil().CheckQuota(quotaCompareInfo);
            }
        }
    };

    Quota quota = new Quota();
    quota.addMessageNeedValidation(APICreateEipMsg.class);
    quota.addMessageNeedValidation(APIChangeResourceOwnerMsg.class);
    quota.setOperator(checker);

    QuotaPair p = new QuotaPair();
    p.setName(EipConstant.QUOTA_EIP_NUM);
    p.setValue(QuotaConstant.QUOTA_EIP_NUM);
    quota.addPair(p);
    return list(quota);
}
/**
 * Vetoes attaching an L3 network to a vm when that L3 network is already
 * providing an EIP to one of the vm's nics.
 *
 * @param vm the vm the L3 network is about to be attached to
 * @param l3 the L3 network being attached
 * @throws OperationFailureException if the L3 network serves an EIP of this vm
 */
@Override
public void vmPreAttachL3Network(final VmInstanceInventory vm, final L3NetworkInventory l3) {
    final List<String> nicUuids = CollectionUtils.transformToList(vm.getVmNics(),
            new Function<String, VmNicInventory>() {
                @Override
                public String call(VmNicInventory arg) {
                    return arg.getUuid();
                }
            });

    if (nicUuids.isEmpty()) {
        // vm has no nics, so it cannot have any EIP
        return;
    }

    // NOTE(review): @Transactional on an anonymous Runnable invoked directly
    // via run() relies on the project's AOP weaving rather than a Spring
    // proxy — confirm the transaction is actually applied here.
    new Runnable() {
        @Override
        @Transactional(readOnly = true)
        public void run() {
            String sql = "select count(*)" +
                    " from EipVO eip, VipVO vip" +
                    " where eip.vipUuid = vip.uuid" +
                    " and vip.l3NetworkUuid = :l3Uuid" +
                    " and eip.vmNicUuid in (:nicUuids)";
            TypedQuery<Long> q = dbf.getEntityManager().createQuery(sql, Long.class);
            q.setParameter("l3Uuid", l3.getUuid());
            q.setParameter("nicUuids", nicUuids);
            Long count = q.getSingleResult();
            if (count > 0) {
                throw new OperationFailureException(operr("unable to attach the L3 network[uuid:%s, name:%s] to the vm[uuid:%s, name:%s]," +
                        " because the L3 network is providing EIP to one of the vm's nic",
                        l3.getUuid(), l3.getName(), vm.getUuid(), vm.getName()));
            }
        }
    }.run();
}
/**
 * Keeps the EIP's recorded guest IP in sync when a vm nic's IP changes.
 * No-op for nics that have no EIP attached.
 */
@Override
public void vmIpChanged(VmInstanceInventory vm, VmNicInventory nic, UsedIpInventory oldIp, UsedIpInventory newIp) {
    SimpleQuery<EipVO> query = dbf.createQuery(EipVO.class);
    query.add(EipVO_.vmNicUuid, Op.EQ, nic.getUuid());
    EipVO eipVO = query.find();
    if (eipVO == null) {
        // no EIP on this nic; nothing to update
        return;
    }

    eipVO.setGuestIp(newIp.getIp());
    dbf.update(eipVO);

    logger.debug(String.format("update the EIP[uuid:%s, name:%s]'s guest IP from %s to %s for the nic[uuid:%s]",
            eipVO.getUuid(), eipVO.getName(), oldIp.getIp(), newIp.getIp(), nic.getUuid()));
}
/**
 * After a resource changes owner, cascades ownership of any EIPs attached to
 * the vm's nics. Only vm instance resources are relevant here.
 */
@Override
public void resourceOwnerAfterChange(AccountResourceRefInventory ref, String newOwnerUuid) {
    boolean isVmInstance = VmInstanceVO.class.getSimpleName().equals(ref.getResourceType());
    if (isVmInstance) {
        changeEipOwner(ref, newOwnerUuid);
    }
}
/**
 * Transfers ownership of every EIP bound to any nic of the given vm.
 *
 * @param ref          account/resource reference whose resourceUuid is the vm uuid
 * @param newOwnerUuid account uuid of the new owner
 */
@Transactional
private void changeEipOwner(AccountResourceRefInventory ref, String newOwnerUuid) {
    String sql = "select eip.uuid" +
            " from VmInstanceVO vm, VmNicVO nic, EipVO eip" +
            " where vm.uuid = nic.vmInstanceUuid" +
            " and nic.uuid = eip.vmNicUuid" +
            " and vm.uuid = :uuid";
    TypedQuery<String> query = dbf.getEntityManager().createQuery(sql, String.class);
    query.setParameter("uuid", ref.getResourceUuid());
    List<String> eipUuids = query.getResultList();

    if (eipUuids.isEmpty()) {
        logger.debug(String.format("Vm[uuid:%s] doesn't have any eip, there is no need to change owner of eip.",
                ref.getResourceUuid()));
        return;
    }

    for (String eipUuid : eipUuids) {
        acntMgr.changeResourceOwner(eipUuid, newOwnerUuid);
    }
}
@Override
public ServiceReference getServiceReference(String vipUuid) {
    // Always reports the EIP service type with a reference count of 0.
    // NOTE(review): vipUuid is ignored — confirm the count is intentionally fixed.
    return new VipGetServiceReferencePoint.ServiceReference(EipConstant.EIP_NETWORK_SERVICE_TYPE, 0);
}
}
|
package info.cukes;
import org.assertj.core.api.Assertions;
import org.springframework.test.context.ContextConfiguration;
import cucumber.api.java.en.Given;
import cucumber.api.java.en.Then;
import cucumber.api.java.en.When;
import java.util.ArrayList;
import java.util.List;
import javax.inject.Inject;
/**
 * <p>Cucumber glue binding the author/book feature steps to the
 * repository-backed persistence layer.</p>
 *
 * @author glick
 */
@SuppressWarnings("CdiInjectionPointsInspection")
@ContextConfiguration(locations = "/cucumber.xml")
public class AuthorStepDefs
{
  @Inject
  private AuthorRepository authorRepository;

  @Inject
  private BookRepository bookRepository;

  @Inject
  AuthorDelegate authorDelegate;

  List<String> authorNames = new ArrayList<>();

  List<Author> authorList = new ArrayList<>();

  private int authorsAdded;

  private int booksAdded;

  /**
   * Records both author names for later verification and builds the
   * corresponding {@link Author} objects in {@code authorList}.
   *
   * @param firstAuthorName author name
   * @param secondAuthorName author name
   * @throws Throwable on any step failure
   */
  @Given("^\"(.*?)\" and \"(.*?)\" are authors$")
  public void and_are_authors(String firstAuthorName, String secondAuthorName) throws Throwable {
    for (String authorName : new String[] { firstAuthorName, secondAuthorName })
    {
      authorList.add(new Author(authorName));
      authorNames.add(authorName);
    }
    authorsAdded = authorList.size();
  }

  /**
   * Creates the book, wires it to the captured authors via the delegate, and
   * persists the authors (dirty checking cascades the book).
   *
   * @param bookTitle the book title
   * @throws Throwable on any step failure
   */
  @When("^they write a book entitled \"(.*?)\"$")
  public void they_write_a_book_entitled(String bookTitle) throws Throwable {
    Book newBook = new Book(bookTitle);
    List<Author> authorsWithBook = authorDelegate.assignBookToAuthors(authorList, newBook);
    authorRepository.save(authorsWithBook);
    booksAdded = 1;
  }

  /**
   * Verifies the author/book association survived the round trip through the
   * persistent store, in both directions.
   *
   * @throws Throwable on any step failure
   */
  @Then("^their names should be associated with that title in the persistent store$")
  public void their_names_should_be_associated_with_that_title_in_the_persistent_store() throws Throwable
  {
    List<Author> persistedAuthors = authorRepository.findAll();
    Assertions.assertThat(persistedAuthors).hasSize(authorsAdded);

    List<Book> persistedBooks = bookRepository.findAll();
    Assertions.assertThat(persistedBooks).hasSize(booksAdded);

    for (Author persistedAuthor : persistedAuthors)
    {
      Assertions.assertThat(authorNames).contains(persistedAuthor.getAuthorName());
      Assertions.assertThat(persistedBooks).containsAll(persistedAuthor.getAuthoredBooks());

      for (Book persistedBook : persistedBooks)
      {
        Assertions.assertThat(persistedBook.getBookAuthors()).contains(persistedAuthor);
      }
    }
  }
}
|
package innovimax.mixthem;
import innovimax.mixthem.arguments.Rule;
import java.util.List;
/**
 * <p>Describes a specific test run for a rule.</p>
 * <p>Depending on the rule, test runs may have some parameter values or not.</p>
 * @author Innovimax
 * @version 1.0
 */
public class RuleRun {
    /** Sentinel test id meaning "applies to every test". */
    private static final int ALL_TESTS = -1;

    private final int testId;
    private final List<String> params;

    /**
     * Creates a rule run that applies to all tests.
     *
     * @param params the list of parameter values for this run
     */
    public RuleRun(List<String> params) {
        this(ALL_TESTS, params);
    }

    /**
     * Creates a rule run for a specific test.
     *
     * @param testId the identifier of the test this run is attached to
     * @param params the list of parameter values for this run
     */
    public RuleRun(int testId, List<String> params) {
        this.testId = testId;
        this.params = params;
    }

    /**
     * Returns true if the given test is authorized for this run.
     *
     * @param testId the test identifier to check
     * @return true if this run applies to all tests, or to the given test
     */
    public boolean accept(int testId) {
        return this.testId == ALL_TESTS || this.testId == testId;
    }

    /**
     * Returns the list of parameter values for this run.
     *
     * @return the list of parameter values for this run
     */
    public List<String> getParams() {
        return this.params;
    }
}
|
package org.sqlite;
import static org.junit.Assert.*;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/** These tests are designed to stress Statements on memory databases. */
public class DBMetaDataTest
{
private Connection conn;
private Statement stat;
private DatabaseMetaData meta;
@BeforeClass
public static void forName() throws Exception {
    // Register the SQLite JDBC driver with DriverManager once per class.
    Class.forName("org.sqlite.JDBC");
}
@Before
public void connect() throws Exception {
    // Fresh in-memory database per test; a table plus a view give the
    // metadata calls (getTables, getColumns, ...) something to report.
    conn = DriverManager.getConnection("jdbc:sqlite:");
    stat = conn.createStatement();
    stat.executeUpdate("create table test (id integer primary key, fn float, sn);");
    stat.executeUpdate("create view testView as select * from test;");
    meta = conn.getMetaData();
}
/** Releases per-test resources; the connection is closed even if closing the statement throws. */
@After
public void close() throws SQLException {
    meta = null;
    try {
        stat.close();
    } finally {
        // previously a failure in stat.close() leaked the connection
        conn.close();
    }
}
/** getTables() lists the table and the view, and honors name/type filters. */
@Test
public void getTables() throws SQLException {
    ResultSet rs = meta.getTables(null, null, null, null);
    assertNotNull(rs);
    assertTrue(rs.next());
    // assertEquals(expected, actual): args were reversed, which makes JUnit
    // failure messages report the values backwards
    assertEquals("test", rs.getString("TABLE_NAME"));
    assertEquals("TABLE", rs.getString("TABLE_TYPE"));
    assertTrue(rs.next());
    assertEquals("testView", rs.getString("TABLE_NAME"));
    assertEquals("VIEW", rs.getString("TABLE_TYPE"));
    rs.close();

    // no match for a non-existent table name
    rs = meta.getTables(null, null, "bob", null);
    assertFalse(rs.next());
    rs.close();

    // exact name matches only the table
    rs = meta.getTables(null, null, "test", null);
    assertTrue(rs.next());
    assertFalse(rs.next());
    rs.close();

    // LIKE pattern matches both the table and the view
    rs = meta.getTables(null, null, "test%", null);
    assertTrue(rs.next());
    assertTrue(rs.next());
    rs.close();

    // type filter: tables only
    rs = meta.getTables(null, null, null, new String[] { "table" });
    assertTrue(rs.next());
    assertEquals("test", rs.getString("TABLE_NAME"));
    assertFalse(rs.next());
    rs.close();

    // type filter: views only
    rs = meta.getTables(null, null, null, new String[] { "view" });
    assertTrue(rs.next());
    assertEquals("testView", rs.getString("TABLE_NAME"));
    assertFalse(rs.next());
    rs.close();
}
/** getTableTypes() reports exactly TABLE and VIEW, in that order. */
@Test
public void getTableTypes() throws SQLException {
    ResultSet rs = meta.getTableTypes();
    assertNotNull(rs);
    assertTrue(rs.next());
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals("TABLE", rs.getString("TABLE_TYPE"));
    assertTrue(rs.next());
    assertEquals("VIEW", rs.getString("TABLE_TYPE"));
    assertFalse(rs.next());
}
/** getTypeInfo() reports SQLite's five storage classes in alphabetical order. */
@Test
public void getTypeInfo() throws SQLException {
    ResultSet rs = meta.getTypeInfo();
    assertNotNull(rs);
    // assertEquals(expected, actual) so failure messages read correctly
    assertTrue(rs.next());
    assertEquals("BLOB", rs.getString("TYPE_NAME"));
    assertTrue(rs.next());
    assertEquals("INTEGER", rs.getString("TYPE_NAME"));
    assertTrue(rs.next());
    assertEquals("NULL", rs.getString("TYPE_NAME"));
    assertTrue(rs.next());
    assertEquals("REAL", rs.getString("TYPE_NAME"));
    assertTrue(rs.next());
    assertEquals("TEXT", rs.getString("TYPE_NAME"));
    assertFalse(rs.next());
}
/** getColumns() resolves exact names and SQL LIKE patterns for table and column. */
@Test
public void getColumns() throws SQLException {
    ResultSet rs = meta.getColumns(null, null, "test", "id");
    assertTrue(rs.next());
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals("test", rs.getString("TABLE_NAME"));
    assertEquals("id", rs.getString("COLUMN_NAME"));
    assertEquals(Types.INTEGER, rs.getInt("DATA_TYPE"));
    assertFalse(rs.next());

    rs = meta.getColumns(null, null, "test", "fn");
    assertTrue(rs.next());
    assertEquals("fn", rs.getString("COLUMN_NAME"));
    assertEquals(Types.FLOAT, rs.getInt("DATA_TYPE"));
    assertFalse(rs.next());

    rs = meta.getColumns(null, null, "test", "sn");
    assertTrue(rs.next());
    assertEquals("sn", rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());

    // wildcard column pattern: all three columns in declaration order
    rs = meta.getColumns(null, null, "test", "%");
    assertTrue(rs.next());
    assertEquals("id", rs.getString("COLUMN_NAME"));
    assertTrue(rs.next());
    assertEquals("fn", rs.getString("COLUMN_NAME"));
    assertTrue(rs.next());
    assertEquals("sn", rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());

    // suffix pattern matches fn and sn only
    rs = meta.getColumns(null, null, "test", "%n");
    assertTrue(rs.next());
    assertEquals("fn", rs.getString("COLUMN_NAME"));
    assertTrue(rs.next());
    assertEquals("sn", rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());

    rs = meta.getColumns(null, null, "test%", "%");
    assertTrue(rs.next());
    assertEquals("id", rs.getString("COLUMN_NAME"));
    assertTrue(rs.next());
    assertEquals("fn", rs.getString("COLUMN_NAME"));
    assertTrue(rs.next());
    assertEquals("sn", rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());

    rs = meta.getColumns(null, null, "%", "%");
    assertTrue(rs.next());
    assertEquals("test", rs.getString("TABLE_NAME"));
    assertEquals("id", rs.getString("COLUMN_NAME"));
    assertTrue(rs.next());
    assertEquals("fn", rs.getString("COLUMN_NAME"));
    assertTrue(rs.next());
    assertEquals("sn", rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());

    rs = meta.getColumns(null, null, "doesnotexist", "%");
    assertFalse(rs.next());
}
/** Column order of getTables() follows the JDBC DatabaseMetaData contract. */
@Test
public void columnOrderOfgetTables() throws SQLException {
    ResultSet rs = meta.getTables(null, null, null, null);
    assertTrue(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(10, rsmeta.getColumnCount());
    assertEquals("TABLE_CAT", rsmeta.getColumnName(1));
    assertEquals("TABLE_SCHEM", rsmeta.getColumnName(2));
    assertEquals("TABLE_NAME", rsmeta.getColumnName(3));
    assertEquals("TABLE_TYPE", rsmeta.getColumnName(4));
    assertEquals("REMARKS", rsmeta.getColumnName(5));
    assertEquals("TYPE_CAT", rsmeta.getColumnName(6));
    assertEquals("TYPE_SCHEM", rsmeta.getColumnName(7));
    assertEquals("TYPE_NAME", rsmeta.getColumnName(8));
    assertEquals("SELF_REFERENCING_COL_NAME", rsmeta.getColumnName(9));
    assertEquals("REF_GENERATION", rsmeta.getColumnName(10));
}
/** Column order of getTableTypes() follows the JDBC contract. */
@Test
public void columnOrderOfgetTableTypes() throws SQLException {
    ResultSet rs = meta.getTableTypes();
    assertTrue(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(1, rsmeta.getColumnCount());
    assertEquals("TABLE_TYPE", rsmeta.getColumnName(1));
}
/** Column order of getTypeInfo() follows the JDBC contract. */
@Test
public void columnOrderOfgetTypeInfo() throws SQLException {
    ResultSet rs = meta.getTypeInfo();
    assertTrue(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(18, rsmeta.getColumnCount());
    assertEquals("TYPE_NAME", rsmeta.getColumnName(1));
    assertEquals("DATA_TYPE", rsmeta.getColumnName(2));
    assertEquals("PRECISION", rsmeta.getColumnName(3));
    assertEquals("LITERAL_PREFIX", rsmeta.getColumnName(4));
    assertEquals("LITERAL_SUFFIX", rsmeta.getColumnName(5));
    assertEquals("CREATE_PARAMS", rsmeta.getColumnName(6));
    assertEquals("NULLABLE", rsmeta.getColumnName(7));
    assertEquals("CASE_SENSITIVE", rsmeta.getColumnName(8));
    assertEquals("SEARCHABLE", rsmeta.getColumnName(9));
    assertEquals("UNSIGNED_ATTRIBUTE", rsmeta.getColumnName(10));
    assertEquals("FIXED_PREC_SCALE", rsmeta.getColumnName(11));
    assertEquals("AUTO_INCREMENT", rsmeta.getColumnName(12));
    assertEquals("LOCAL_TYPE_NAME", rsmeta.getColumnName(13));
    assertEquals("MINIMUM_SCALE", rsmeta.getColumnName(14));
    assertEquals("MAXIMUM_SCALE", rsmeta.getColumnName(15));
    assertEquals("SQL_DATA_TYPE", rsmeta.getColumnName(16));
    assertEquals("SQL_DATETIME_SUB", rsmeta.getColumnName(17));
    assertEquals("NUM_PREC_RADIX", rsmeta.getColumnName(18));
}
/** Column order of getColumns() follows the JDBC contract. */
@Test
public void columnOrderOfgetColumns() throws SQLException {
    ResultSet rs = meta.getColumns(null, null, "test", null);
    assertTrue(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(22, rsmeta.getColumnCount());
    assertEquals("TABLE_CAT", rsmeta.getColumnName(1));
    assertEquals("TABLE_SCHEM", rsmeta.getColumnName(2));
    assertEquals("TABLE_NAME", rsmeta.getColumnName(3));
    assertEquals("COLUMN_NAME", rsmeta.getColumnName(4));
    assertEquals("DATA_TYPE", rsmeta.getColumnName(5));
    assertEquals("TYPE_NAME", rsmeta.getColumnName(6));
    assertEquals("COLUMN_SIZE", rsmeta.getColumnName(7));
    assertEquals("BUFFER_LENGTH", rsmeta.getColumnName(8));
    assertEquals("DECIMAL_DIGITS", rsmeta.getColumnName(9));
    assertEquals("NUM_PREC_RADIX", rsmeta.getColumnName(10));
    assertEquals("NULLABLE", rsmeta.getColumnName(11));
    assertEquals("REMARKS", rsmeta.getColumnName(12));
    assertEquals("COLUMN_DEF", rsmeta.getColumnName(13));
    assertEquals("SQL_DATA_TYPE", rsmeta.getColumnName(14));
    assertEquals("SQL_DATETIME_SUB", rsmeta.getColumnName(15));
    assertEquals("CHAR_OCTET_LENGTH", rsmeta.getColumnName(16));
    assertEquals("ORDINAL_POSITION", rsmeta.getColumnName(17));
    assertEquals("IS_NULLABLE", rsmeta.getColumnName(18));
    // should be SCOPE_CATALOG, but misspelt in the standard
    assertEquals("SCOPE_CATLOG", rsmeta.getColumnName(19));
    assertEquals("SCOPE_SCHEMA", rsmeta.getColumnName(20));
    assertEquals("SCOPE_TABLE", rsmeta.getColumnName(21));
    assertEquals("SOURCE_DATA_TYPE", rsmeta.getColumnName(22));
}
// the following functions always return an empty resultset, so
// do not bother testing their parameters, only the column types
/** Column order of getProcedures(); the result set itself is always empty. */
@Test
public void columnOrderOfgetProcedures() throws SQLException {
    ResultSet rs = meta.getProcedures(null, null, null);
    assertFalse(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(8, rsmeta.getColumnCount());
    assertEquals("PROCEDURE_CAT", rsmeta.getColumnName(1));
    assertEquals("PROCEDURE_SCHEM", rsmeta.getColumnName(2));
    assertEquals("PROCEDURE_NAME", rsmeta.getColumnName(3));
    // currently (Java 1.5), cols 4,5,6 are undefined
    assertEquals("REMARKS", rsmeta.getColumnName(7));
    assertEquals("PROCEDURE_TYPE", rsmeta.getColumnName(8));
}
/** Column order of getProcedureColumns(); the result set itself is always empty. */
@Test
public void columnOrderOfgetProcedurColumns() throws SQLException {
    ResultSet rs = meta.getProcedureColumns(null, null, null, null);
    assertFalse(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(13, rsmeta.getColumnCount());
    assertEquals("PROCEDURE_CAT", rsmeta.getColumnName(1));
    assertEquals("PROCEDURE_SCHEM", rsmeta.getColumnName(2));
    assertEquals("PROCEDURE_NAME", rsmeta.getColumnName(3));
    assertEquals("COLUMN_NAME", rsmeta.getColumnName(4));
    assertEquals("COLUMN_TYPE", rsmeta.getColumnName(5));
    assertEquals("DATA_TYPE", rsmeta.getColumnName(6));
    assertEquals("TYPE_NAME", rsmeta.getColumnName(7));
    assertEquals("PRECISION", rsmeta.getColumnName(8));
    assertEquals("LENGTH", rsmeta.getColumnName(9));
    assertEquals("SCALE", rsmeta.getColumnName(10));
    assertEquals("RADIX", rsmeta.getColumnName(11));
    assertEquals("NULLABLE", rsmeta.getColumnName(12));
    assertEquals("REMARKS", rsmeta.getColumnName(13));
}
/** Column order of getSchemas(); the result set itself is always empty. */
@Test
public void columnOrderOfgetSchemas() throws SQLException {
    ResultSet rs = meta.getSchemas();
    assertFalse(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(2, rsmeta.getColumnCount());
    assertEquals("TABLE_SCHEM", rsmeta.getColumnName(1));
    assertEquals("TABLE_CATALOG", rsmeta.getColumnName(2));
}
/** Column order of getCatalogs(); the result set itself is always empty. */
@Test
public void columnOrderOfgetCatalogs() throws SQLException {
    ResultSet rs = meta.getCatalogs();
    assertFalse(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(1, rsmeta.getColumnCount());
    assertEquals("TABLE_CAT", rsmeta.getColumnName(1));
}
/** Column order of getColumnPrivileges(); the result set itself is always empty. */
@Test
public void columnOrderOfgetColumnPrivileges() throws SQLException {
    ResultSet rs = meta.getColumnPrivileges(null, null, null, null);
    assertFalse(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(8, rsmeta.getColumnCount());
    assertEquals("TABLE_CAT", rsmeta.getColumnName(1));
    assertEquals("TABLE_SCHEM", rsmeta.getColumnName(2));
    assertEquals("TABLE_NAME", rsmeta.getColumnName(3));
    assertEquals("COLUMN_NAME", rsmeta.getColumnName(4));
    assertEquals("GRANTOR", rsmeta.getColumnName(5));
    assertEquals("GRANTEE", rsmeta.getColumnName(6));
    assertEquals("PRIVILEGE", rsmeta.getColumnName(7));
    assertEquals("IS_GRANTABLE", rsmeta.getColumnName(8));
}
/** Column order of getTablePrivileges(); the result set itself is always empty. */
@Test
public void columnOrderOfgetTablePrivileges() throws SQLException {
    ResultSet rs = meta.getTablePrivileges(null, null, null);
    assertFalse(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(7, rsmeta.getColumnCount());
    assertEquals("TABLE_CAT", rsmeta.getColumnName(1));
    assertEquals("TABLE_SCHEM", rsmeta.getColumnName(2));
    assertEquals("TABLE_NAME", rsmeta.getColumnName(3));
    assertEquals("GRANTOR", rsmeta.getColumnName(4));
    assertEquals("GRANTEE", rsmeta.getColumnName(5));
    assertEquals("PRIVILEGE", rsmeta.getColumnName(6));
    assertEquals("IS_GRANTABLE", rsmeta.getColumnName(7));
}
/** Column order of getBestRowIdentifier(); the result set itself is always empty. */
@Test
public void columnOrderOfgetBestRowIdentifier() throws SQLException {
    ResultSet rs = meta.getBestRowIdentifier(null, null, null, 0, false);
    assertFalse(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(8, rsmeta.getColumnCount());
    assertEquals("SCOPE", rsmeta.getColumnName(1));
    assertEquals("COLUMN_NAME", rsmeta.getColumnName(2));
    assertEquals("DATA_TYPE", rsmeta.getColumnName(3));
    assertEquals("TYPE_NAME", rsmeta.getColumnName(4));
    assertEquals("COLUMN_SIZE", rsmeta.getColumnName(5));
    assertEquals("BUFFER_LENGTH", rsmeta.getColumnName(6));
    assertEquals("DECIMAL_DIGITS", rsmeta.getColumnName(7));
    assertEquals("PSEUDO_COLUMN", rsmeta.getColumnName(8));
}
/** Column order of getVersionColumns(); the result set itself is always empty. */
@Test
public void columnOrderOfgetVersionColumns() throws SQLException {
    ResultSet rs = meta.getVersionColumns(null, null, null);
    assertFalse(rs.next());
    ResultSetMetaData rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(8, rsmeta.getColumnCount());
    assertEquals("SCOPE", rsmeta.getColumnName(1));
    assertEquals("COLUMN_NAME", rsmeta.getColumnName(2));
    assertEquals("DATA_TYPE", rsmeta.getColumnName(3));
    assertEquals("TYPE_NAME", rsmeta.getColumnName(4));
    assertEquals("COLUMN_SIZE", rsmeta.getColumnName(5));
    assertEquals("BUFFER_LENGTH", rsmeta.getColumnName(6));
    assertEquals("DECIMAL_DIGITS", rsmeta.getColumnName(7));
    assertEquals("PSEUDO_COLUMN", rsmeta.getColumnName(8));
}
/** Column order of getPrimaryKeys(), plus no-key, single-key and composite-key lookups. */
@Test
public void columnOrderOfgetPrimaryKeys() throws SQLException {
    ResultSet rs;
    ResultSetMetaData rsmeta;

    stat.executeUpdate("create table nopk (c1, c2, c3, c4);");
    stat.executeUpdate("create table pk1 (col1 primary key, col2, col3);");
    stat.executeUpdate("create table pk2 (col1, col2 primary key, col3);");
    stat.executeUpdate("create table pk3 (col1, col2, col3, col4, " + "primary key (col2, col3));");

    rs = meta.getPrimaryKeys(null, null, "nopk");
    assertFalse(rs.next());
    rsmeta = rs.getMetaData();
    // assertEquals(expected, actual) so failure messages read correctly
    assertEquals(6, rsmeta.getColumnCount());
    assertEquals("TABLE_CAT", rsmeta.getColumnName(1));
    assertEquals("TABLE_SCHEM", rsmeta.getColumnName(2));
    assertEquals("TABLE_NAME", rsmeta.getColumnName(3));
    assertEquals("COLUMN_NAME", rsmeta.getColumnName(4));
    assertEquals("KEY_SEQ", rsmeta.getColumnName(5));
    assertEquals("PK_NAME", rsmeta.getColumnName(6));
    rs.close();

    rs = meta.getPrimaryKeys(null, null, "pk1");
    assertTrue(rs.next());
    assertEquals("col1", rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());
    rs.close();

    rs = meta.getPrimaryKeys(null, null, "pk2");
    assertTrue(rs.next());
    assertEquals("col2", rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());
    rs.close();

    // composite key: both columns reported
    rs = meta.getPrimaryKeys(null, null, "pk3");
    assertTrue(rs.next());
    assertEquals("col2", rs.getString("COLUMN_NAME"));
    assertTrue(rs.next());
    assertEquals("col3", rs.getString("COLUMN_NAME"));
    assertFalse(rs.next());
    rs.close();
}
@Test
public void columnOrderOfgetImportedKeys() throws SQLException {
    // address has a single FK into person, so getImportedKeys must report exactly one row.
    stat.executeUpdate("create table person (id integer)");
    stat.executeUpdate("create table address (pid integer, name, foreign key(pid) references person(id))");
    ResultSet importedKeys = meta.getImportedKeys("default", "global", "address");
    assertTrue(importedKeys.next());
    assertEquals("default", importedKeys.getString("PKTABLE_CAT"));
    assertEquals("global", importedKeys.getString("PKTABLE_SCHEM"));
    assertEquals("default", importedKeys.getString("FKTABLE_CAT"));
    assertEquals("person", importedKeys.getString("PKTABLE_NAME"));
    assertEquals("id", importedKeys.getString("PKCOLUMN_NAME"));
    assertNotNull(importedKeys.getString("PK_NAME"));
    assertNotNull(importedKeys.getString("FK_NAME"));
    assertEquals("address", importedKeys.getString("FKTABLE_NAME"));
    assertEquals("pid", importedKeys.getString("FKCOLUMN_NAME"));
    importedKeys.close();
    // person imports nothing; assertFalse replaces the non-idiomatic assertTrue(!...).
    importedKeys = meta.getImportedKeys(null, null, "person");
    assertFalse(importedKeys.next());
    importedKeys.close();
}
@Test
public void columnOrderOfgetExportedKeys() throws SQLException {
    // person is referenced by address, so getExportedKeys on person must report one row.
    stat.executeUpdate("create table person (id integer)");
    stat.executeUpdate("create table address (pid integer, name, foreign key(pid) references person(id))");
    ResultSet exportedKeys = meta.getExportedKeys("default", "global", "person");
    assertTrue(exportedKeys.next());
    assertEquals("default", exportedKeys.getString("PKTABLE_CAT"));
    assertEquals("global", exportedKeys.getString("PKTABLE_SCHEM"));
    assertEquals("default", exportedKeys.getString("FKTABLE_CAT"));
    assertEquals("global", exportedKeys.getString("FKTABLE_SCHEM"));
    assertNotNull(exportedKeys.getString("PK_NAME"));
    assertNotNull(exportedKeys.getString("FK_NAME"));
    assertEquals("person", exportedKeys.getString("PKTABLE_NAME"));
    assertEquals("id", exportedKeys.getString("PKCOLUMN_NAME"));
    assertEquals("address", exportedKeys.getString("FKTABLE_NAME"));
    assertEquals("pid", exportedKeys.getString("FKCOLUMN_NAME"));
    exportedKeys.close();
    // address exports nothing; assertFalse replaces the non-idiomatic assertTrue(!...).
    exportedKeys = meta.getExportedKeys(null, null, "address");
    assertFalse(exportedKeys.next());
    exportedKeys.close();
}
@Test
public void columnOrderOfgetCrossReference() throws SQLException {
    // Currently only exercises that getCrossReference() can be called without error;
    // the row assertion was disabled (see TODO below).
    stat.executeUpdate("create table person (id integer)");
    stat.executeUpdate("create table address (pid integer, name, foreign key(pid) references person(id))");
    ResultSet cr = meta.getCrossReference(null, null, "person", null, null, "address");
    // TODO: re-enable once getCrossReference returns rows for this FK relationship.
    //assertTrue(cr.next());
    cr.close(); // close even while the assertion is disabled so the test does not leak the result set
}
/* TODO
@Test public void columnOrderOfgetTypeInfo() throws SQLException {
@Test public void columnOrderOfgetIndexInfo() throws SQLException {
@Test public void columnOrderOfgetSuperTypes() throws SQLException {
@Test public void columnOrderOfgetSuperTables() throws SQLException {
@Test public void columnOrderOfgetAttributes() throws SQLException {*/
@Test
public void columnOrderOfgetUDTs() throws SQLException {
ResultSet rs = meta.getUDTs(null, null, null, null);
assertFalse(rs.next());
ResultSetMetaData rsmeta = rs.getMetaData();
assertEquals(rsmeta.getColumnCount(), 7);
assertEquals(rsmeta.getColumnName(1), "TYPE_CAT");
assertEquals(rsmeta.getColumnName(2), "TYPE_SCHEM");
assertEquals(rsmeta.getColumnName(3), "TYPE_NAME");
assertEquals(rsmeta.getColumnName(4), "CLASS_NAME");
assertEquals(rsmeta.getColumnName(5), "DATA_TYPE");
assertEquals(rsmeta.getColumnName(6), "REMARKS");
assertEquals(rsmeta.getColumnName(7), "BASE_TYPE");
}
@Test
public void version() throws SQLException {
// Smoke test: the driver must report a non-null database product version string.
assertNotNull(meta.getDatabaseProductVersion());
}
}
|
package product;
import operators.base.RefreshOperator;
import operators.configurations.BuildConfigurationSetPageOperator;
import operators.products.ProductPageOperator;
import operators.projects.ProjectPageOperator;
import operators.products.ImportPageOperator;
import org.junit.Test;
import ui.UITest;
/**
 * UI tests that import a set of known products through the PNC web UI.
 * Each test drives the same create-product, create-project, import flow
 * via {@link #importConfig}.
 */
public class ImportProductTest extends UITest {
    @Test
    public void pncSimpleProject() {
        importConfig("pnc-simple-test", "1.0", "PNC Simple Test",
                "https://github.com/project-ncl/pnc-simple-test-project.git",
                "master",
                "mvn clean deploy");
    }
    @Test
    public void jdg() {
        importConfig("jdg-infinispan", "7.0", "JDG Infinispan",
                "http://git.app.eng.bos.redhat.com/infinispan/infinispan.git",
                "JDG_7.0.0.ER4_pnc_wa_5",
                "mvn clean deploy -DskipTests -Pdistribution");
    }
    @Test
    public void fabric8() {
        importConfig("fabric8", "8.0", "Fabric8",
                "https://github.com/fabric8io/fabric8.git",
                "master",
                "mvn clean deploy -DskipTests=true");
    }
    @Test
    public void keycloak() {
        importConfig("keycloak", "1.9", "Keycloak",
                "https://github.com/keycloak/keycloak.git",
                "master",
                "mvn clean deploy -Pdistribution -DskipTests=true");
    }
    @Test
    public void pnc() {
        importConfig("pnc-ncl", "1.0", "PNC NCL",
                "https://github.com/project-ncl/pnc.git",
                "master",
                "mvn clean deploy -DskipTests=true");
    }
    @Test
    public void antlr() {
        importConfig("antlr", "2.7", "Antlr",
                "http://git.app.eng.bos.redhat.com/git/antlr2.git",
                "9f6163d",
                "mvn clean deploy");
    }
    @Test
    public void sso() {
        importConfig("keycloak", "1.9", "RH SSO",
                "http://git.engineering.redhat.com/git/users/pkralik/keycloak-prod.git",
                "1.9.0.Final-redhat",
                "mvn clean deploy -Pdistribution");
    }
    /**
     * Drives the full import flow for one product and asserts the resulting build
     * configuration set appears in the Build Groups page.
     * <p>
     * The original signature was {@code String... param} indexed with magic numbers,
     * which compiles for any argument count and only fails at runtime with
     * ArrayIndexOutOfBoundsException; explicit parameters make arity and meaning
     * compile-checked. Call sites pass the same six arguments in the same order.
     *
     * @param name        product/project identifier used by the page operators
     * @param version     product version to import
     * @param description human-readable name; " product"/" project" suffixes are appended
     * @param scmUrl      git repository URL to import from
     * @param scmRevision branch or commit to build
     * @param buildScript command executed by the generated build configuration
     */
    private void importConfig(String name, String version, String description,
                              String scmUrl, String scmRevision, String buildScript) {
        new ProductPageOperator(name).createProduct(description + " product");
        new RefreshOperator().refresh();
        new ProjectPageOperator(name).createProject(description + " project");
        new RefreshOperator().refresh();
        ImportPageOperator product = new ImportPageOperator(name);
        product.importProduct(version, scmUrl, scmRevision, buildScript);
        product.buildConfigurationSet();
        String buildName = product.getConfigSetName();
        new BuildConfigurationSetPageOperator(buildName).menuBuildGroups();
        assertLinkExists(buildName);
    }
}
|
package org.jetbrains.idea.svn.actions;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.DataContext;
import com.intellij.openapi.actionSystem.PlatformDataKeys;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.vcs.AbstractVcs;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.idea.svn.SvnVcs;
import org.jetbrains.idea.svn.dialogs.ShareDialog;
import org.tmatesoft.svn.core.SVNCommitInfo;
import org.tmatesoft.svn.core.SVNException;
import org.tmatesoft.svn.core.SVNURL;
import org.tmatesoft.svn.core.wc.SVNRevision;
import org.tmatesoft.svn.core.wc.SVNWCUtil;
import java.io.File;
/**
 * Action that "shares" a not-yet-versioned local directory into a Subversion
 * repository: it creates a matching directory in the repository, checks it out
 * over the local directory, and schedules the local contents for addition.
 * The final commit is left to the user (see the info message in perform()).
 */
public class ShareProjectAction extends BasicAction {
protected String getActionName(AbstractVcs vcs) {
return "Share Directory...";
}
// Visible only when exactly one file is selected; enabled only when that file's
// directory is not already under Subversion control.
public void update(AnActionEvent e) {
Presentation presentation = e.getPresentation();
final DataContext dataContext = e.getDataContext();
Project project = PlatformDataKeys.PROJECT.getData(dataContext);
if (project == null) {
presentation.setEnabled(false);
presentation.setVisible(false);
return;
}
VirtualFile[] files = PlatformDataKeys.VIRTUAL_FILE_ARRAY.getData(dataContext);
if (files == null || files.length == 0) {
presentation.setEnabled(false);
presentation.setVisible(false);
return;
}
boolean enabled = false;
boolean visible = false;
if (files.length == 1) {
visible = true;
if (!SVNWCUtil.isVersionedDirectory(new File(files [0].getPath()))) {
enabled = true;
}
}
presentation.setEnabled(enabled);
presentation.setVisible(visible);
}
// Per-file enabling is handled entirely in update() above; always false here.
protected boolean isEnabled(Project project, SvnVcs vcs, VirtualFile file) {
return false;
}
protected boolean needsFiles() {
return true;
}
// Performs the share: mkdir in the repository, checkout over the local directory,
// then recursively schedule the local files for addition. The SVN work runs under
// a modal progress dialog; an SVNException raised inside the Runnable is captured
// in a one-element array and rethrown afterwards as a VcsException.
protected void perform(Project project, final SvnVcs activeVcs, final VirtualFile file, DataContext context) throws VcsException {
ShareDialog shareDialog = new ShareDialog(project);
shareDialog.show();
final String parent = shareDialog.getSelectedURL();
if (shareDialog.isOK() && parent != null) {
// Single-element array lets the anonymous Runnable report failure back out.
final SVNException[] error = new SVNException[1];
ProgressManager.getInstance().runProcessWithProgressSynchronously(new Runnable() {
public void run() {
try {
SVNURL url = SVNURL.parseURIEncoded(parent).appendPath(file.getName(), false);
SVNCommitInfo info = activeVcs.createCommitClient().doMkDir(new SVNURL[] {url}, "Directory '" + file.getName() +"' created by IntelliJ IDEA");
SVNRevision revision = SVNRevision.create(info.getNewRevision());
activeVcs.createUpdateClient().doCheckout(url, new File(file.getPath()), SVNRevision.UNDEFINED, revision, true);
activeVcs.createWCClient().doAdd(new File(file.getPath()), true, false, false, true);
} catch (SVNException e) {
error[0] = e;
}
}
}, "Share Directory", false, project);
if (error[0] != null) {
throw new VcsException(error[0].getMessage());
}
Messages.showInfoMessage(project, "To complete share operation commit '" + file.getName() + "'.", "Share Directory");
}
}
// Batch mode is not supported for sharing; see isBatchAction().
protected void batchPerform(Project project, final SvnVcs activeVcs, VirtualFile[] file, DataContext context) throws VcsException {
}
protected boolean isBatchAction() {
return false;
}
}
|
package net.ontopia.presto.spi.rules;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.ontopia.presto.spi.PrestoDataProvider;
import net.ontopia.presto.spi.PrestoField;
import net.ontopia.presto.spi.PrestoSchemaProvider;
import net.ontopia.presto.spi.PrestoTopic;
import net.ontopia.presto.spi.PrestoType;
import net.ontopia.presto.spi.functions.PrestoFieldFunction;
import net.ontopia.presto.spi.functions.PrestoFieldFunctionUtils;
import net.ontopia.presto.spi.utils.PrestoAttributes;
import net.ontopia.presto.spi.utils.PrestoContext;
import net.ontopia.presto.spi.utils.PrestoContextRules;
/**
 * Evaluates simple dotted path expressions (e.g. "${field.child:id}") against a
 * PrestoContextRules, returning the values reached at the end of the path.
 * Path segments are field ids, optionally with special ":"-prefixed segments
 * (:parent, :id, :name, :type, :type-name).
 */
public class PathExpressions {
// Matches "${...}" where the inner path may contain word chars, ':', '.' and '-'.
private static final Pattern PATTERN = Pattern.compile("^\\$\\{([\\:\\.\\-\\w]+)\\}$");
// Entry point: a path starting with '$' is treated as a "${...}" expression,
// anything else as a plain field id on the current context's type.
public static List<? extends Object> getValues(PrestoDataProvider dataProvider, PrestoSchemaProvider schemaProvider, PrestoContextRules rules, String path) {
if (path.charAt(0) == '$') {
return getValuesByExpression(dataProvider, schemaProvider, rules, path);
} else {
return getValuesByField(dataProvider, schemaProvider, rules, path);
}
}
// Resolves a single field id directly through the rules' field-value lookup.
private static List<? extends Object> getValuesByField(PrestoDataProvider dataProvider, PrestoSchemaProvider schemaProvider,
PrestoContextRules rules, String fieldId) {
PrestoContext context = rules.getContext();
PrestoType type = context.getType();
PrestoField field = type.getFieldById(fieldId);
return rules.getFieldValues(field).getValues();
}
// Parses the "${...}" expression into segments and walks them from the current context.
private static List<? extends Object> getValuesByExpression(PrestoDataProvider dataProvider, PrestoSchemaProvider schemaProvider,
PrestoContextRules rules, String expr) {
Iterator<String> path = parsePath(expr).iterator();
if (path.hasNext()) {
PrestoAttributes attributes = rules.getAttributes();
PrestoContext context = rules.getContext();
return getValuesByExpression(dataProvider, schemaProvider, attributes, Collections.singletonList(context), path, expr);
} else {
return Collections.emptyList();
}
}
// Recursive walker: intermediate segments map contexts to child contexts; the
// final segment produces the result values.
private static List<? extends Object> getValuesByExpression(PrestoDataProvider dataProvider, PrestoSchemaProvider schemaProvider,
PrestoAttributes attributes, List<PrestoContext> contexts, Iterator<String> path, String expr) {
if (contexts.isEmpty()) {
return Collections.emptyList();
}
String p = path.next();
boolean hasNext = path.hasNext();
if (hasNext) {
// Intermediate segment: compute the set of child contexts for the next step.
List<PrestoContext> nextContexts = new ArrayList<PrestoContext>();
for (PrestoContext context : contexts) {
PrestoType type = context.getType();
if (":parent".equals(p)) {
PrestoContext nextContext = context.getParentContext();
if (nextContext == null) {
throw new RuntimeException("Missing parent context from expression: " + expr);
}
nextContexts.add(nextContext);
// NOTE(review): the next line's string literal appears truncated in this copy
// (unterminated string); the prefix character tested here — which selects the
// "stored values" lookup via p.substring(1) — cannot be determined from this
// source. Verify against the upstream file.
} else if (p.startsWith("
if (!context.isNewTopic()) {
PrestoField valueField = type.getFieldById(p.substring(1));
PrestoTopic topic = context.getTopic();
for (Object value : topic.getStoredValues(valueField)) {
if (value instanceof PrestoTopic) {
PrestoTopic valueTopic = (PrestoTopic)value;
nextContexts.add(PrestoContext.createSubContext(dataProvider, schemaProvider, context, valueField, valueTopic));
}
}
}
} else {
// Plain field segment: prefer a field function if one is configured,
// otherwise read the topic's values directly.
PrestoField valueField = type.getFieldById(p);
PrestoFieldFunction function = PrestoFieldFunctionUtils.createFieldFunction(dataProvider, schemaProvider, attributes, valueField);
List<? extends Object> fieldValues = null;
if (function != null) {
fieldValues = function.execute(context, valueField, null);
} else if (!context.isNewTopic()) {
PrestoTopic topic = context.getTopic();
fieldValues = topic.getValues(valueField);
}
if (fieldValues != null) {
for (Object value : fieldValues) {
if (value instanceof PrestoTopic) {
PrestoTopic valueTopic = (PrestoTopic)value;
nextContexts.add(PrestoContext.createSubContext(dataProvider, schemaProvider, context, valueField, valueTopic));
}
}
}
}
}
return getValuesByExpression(dataProvider, schemaProvider, attributes, nextContexts, path, expr);
} else {
// Final segment: collect values from every remaining context.
List<Object> values = new ArrayList<Object>();
for (PrestoContext context : contexts) {
PrestoType type = context.getType();
boolean isNew = context.isNewTopic();
PrestoTopic topic = context.getTopic();
if (!isNew && p.equals(":id")) {
values.add(topic.getId());
} else if (!isNew && p.equals(":name")) {
values.add(topic.getName());
// NOTE(review): truncated string literal again (same prefix as above) — this
// branch reads stored values of the field named by p.substring(1); verify upstream.
} else if (!isNew && p.startsWith("
PrestoField valueField = type.getFieldById(p.substring(1));
for (Object value : topic.getStoredValues(valueField)) {
values.add(value);
}
} else if (p.equals(":type")) {
values.add(type.getId());
} else if (p.equals(":type-name")) {
values.add(type.getName());
} else {
PrestoField field = type.getFieldById(p);
PrestoFieldFunction function = PrestoFieldFunctionUtils.createFieldFunction(dataProvider, schemaProvider, attributes, field);
if (function != null) {
values.addAll(function.execute(context, field, null));
} else {
if (!isNew) {
values.addAll(topic.getValues(field));
}
}
}
}
return values;
}
}
// Splits the inner "${...}" path on '.' into segments; throws on any string
// that does not match PATTERN.
static List<String> parsePath(String expr) {
Matcher matcher = PATTERN.matcher(expr);
if (matcher.find()) {
String path = matcher.group(1);
List<String> result = new ArrayList<String>(3);
int prev = 0;
int next = path.indexOf('.', prev);
while (next != -1) {
String fieldId = path.substring(prev, next);
result.add(fieldId);
prev = next + 1;
next = path.indexOf('.', prev);
}
String fieldId = path.substring(prev);
result.add(fieldId);
return result;
} else {
throw new RuntimeException("Invalid path expression: " + expr);
}
}
// Manual smoke test for parsePath; not used in production.
public static void main(String[] args) {
String expr = "${partial_runs.blah.has_run.foo}";
System.out.println("X: " + parsePath(expr));
}
}
|
package com.jetbrains.python.testing;
import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.LabeledComponent;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.ui.PanelWithAnchor;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.run.AbstractPyCommonOptionsForm;
import com.jetbrains.python.run.AbstractPythonRunConfigurationParams;
import com.jetbrains.python.run.PyCommonOptionsFormFactory;
import javax.swing.*;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import static com.intellij.openapi.util.io.FileUtil.toSystemIndependentName;
import static com.jetbrains.python.testing.unittest.PythonUnitTestRunConfiguration.TestType;
/**
* @author Leonid Shalupov
*/
/**
 * Swing form backing the Python test run configuration editor. The radio buttons
 * select a test scope (folder / script / class / method / function) and
 * setTestType() toggles which input fields are visible for that scope.
 * Field names are bound to the IntelliJ GUI-designer .form file — do not rename.
 */
public class PythonTestRunConfigurationForm implements AbstractPythonTestRunConfigurationParams, PanelWithAnchor {
private JPanel myRootPanel;
private LabeledComponent myTestClassComponent;
private LabeledComponent myTestMethodComponent;
private LabeledComponent myTestFolderComponent;
private LabeledComponent myTestScriptComponent;
private JRadioButton myAllInFolderRB;
private JRadioButton myTestScriptRB;
private JRadioButton myTestClassRB;
private JRadioButton myTestMethodRB;
private JRadioButton myTestFunctionRB;
private JPanel myAdditionalPanel;
private JPanel myCommonOptionsPlaceholder;
private JPanel myTestsPanel;
private JCheckBox myPatternCheckBox;
private TextFieldWithBrowseButton myTestFolderTextField;
private TextFieldWithBrowseButton myTestScriptTextField;
private JTextField myTestMethodTextField;
private JTextField myTestClassTextField;
private JTextField myPatternTextField;
private JTextField myParamTextField;
private JCheckBox myParamCheckBox;
private final Project myProject;
private final AbstractPyCommonOptionsForm myCommonOptionsForm;
private JComponent anchor;
// Builds the form: embeds the shared common-options panel, wires listeners,
// attaches folder/script browse dialogs, and initializes pattern/param visibility.
public PythonTestRunConfigurationForm(final Project project,
final AbstractPythonTestRunConfiguration configuration) {
myProject = project;
myCommonOptionsForm = PyCommonOptionsFormFactory.getInstance().createForm(configuration);
myCommonOptionsPlaceholder.add(myCommonOptionsForm.getMainPanel(), BorderLayout.CENTER);
initComponents();
setAnchor(myTestMethodComponent.getLabel());
myTestFolderTextField.addBrowseFolderListener(PyBundle.message("runcfg.unittest.dlg.select.folder.path"), null, myProject,
FileChooserDescriptorFactory.createSingleFolderDescriptor());
myTestScriptTextField.addBrowseFolderListener(PyBundle.message("runcfg.unittest.dlg.select.script.path"), null, myProject,
FileChooserDescriptorFactory.createSingleFileNoJarsDescriptor());
myPatternCheckBox.setSelected(configuration.usePattern());
// Param controls are hidden by default; setParamsVisible() reveals them on demand.
myParamTextField.setVisible(false);
myParamCheckBox.setVisible(false);
// Pattern filter only applies when running everything in a folder.
setPatternVisible(getTestType() == AbstractPythonTestRunConfiguration.TestType.TEST_FOLDER);
}
public AbstractPythonRunConfigurationParams getBaseParams() {
return myCommonOptionsForm;
}
// Wires the radio buttons so any scope change re-runs setTestType(), and makes the
// pattern/param text fields follow their checkbox state.
private void initComponents() {
final ActionListener testTypeListener = new ActionListener() {
public void actionPerformed(final ActionEvent e) {
setTestType(getTestType());
}
};
addTestTypeListener(testTypeListener);
myPatternCheckBox.addActionListener(new ActionListener() {
public void actionPerformed(final ActionEvent e) {
myPatternTextField.setEnabled(myPatternCheckBox.isSelected());
}
});
myParamCheckBox.addActionListener(new ActionListener() {
public void actionPerformed(final ActionEvent e) {
myParamTextField.setEnabled(myParamCheckBox.isSelected());
}
});
}
// Registers one listener on all five scope radio buttons.
public void addTestTypeListener(ActionListener testTypeListener) {
myAllInFolderRB.addActionListener(testTypeListener);
myTestScriptRB.addActionListener(testTypeListener);
myTestClassRB.addActionListener(testTypeListener);
myTestMethodRB.addActionListener(testTypeListener);
myTestFunctionRB.addActionListener(testTypeListener);
}
public String getClassName() {
return myTestClassTextField.getText().trim();
}
public void setClassName(String className) {
myTestClassTextField.setText(className);
}
public String getPattern() {
return myPatternTextField.getText().trim();
}
public void setPattern(String pattern) {
myPatternTextField.setText(pattern);
}
// Paths are stored system-independent ('/') and displayed system-dependent.
public String getFolderName() {
return toSystemIndependentName(myTestFolderTextField.getText().trim());
}
public void setFolderName(String folderName) {
myTestFolderTextField.setText(FileUtil.toSystemDependentName(folderName));
}
public String getScriptName() {
return toSystemIndependentName(myTestScriptTextField.getText().trim());
}
public void setScriptName(String scriptName) {
myTestScriptTextField.setText(FileUtil.toSystemDependentName(scriptName));
}
public String getMethodName() {
return myTestMethodTextField.getText().trim();
}
public void setMethodName(String methodName) {
myTestMethodTextField.setText(methodName);
}
// Derives the scope from whichever radio button is selected;
// falls through to TEST_FUNCTION when none of the first four is.
public TestType getTestType() {
if (myAllInFolderRB.isSelected()) {
return TestType.TEST_FOLDER;
}
else if (myTestScriptRB.isSelected()) {
return TestType.TEST_SCRIPT;
}
else if (myTestClassRB.isSelected()) {
return TestType.TEST_CLASS;
}
else if (myTestMethodRB.isSelected()) {
return TestType.TEST_METHOD;
}
else {
return TestType.TEST_FUNCTION;
}
}
@Override
public JComponent getAnchor() {
return anchor;
}
@Override
public void setAnchor(JComponent anchor) {
this.anchor = anchor;
}
public void setPatternVisible(boolean b) {
myPatternTextField.setVisible(b);
myPatternCheckBox.setVisible(b);
}
private static void setSelectedIfNeeded(boolean condition, JRadioButton rb) {
if (condition) {
rb.setSelected(true);
}
}
// Selects the matching radio button and shows/hides the input fields that are
// relevant for the given scope (folder path, script path, class name, method name).
public void setTestType(TestType testType) {
setSelectedIfNeeded(testType == TestType.TEST_FOLDER, myAllInFolderRB);
setSelectedIfNeeded(testType == TestType.TEST_SCRIPT, myTestScriptRB);
setSelectedIfNeeded(testType == TestType.TEST_CLASS, myTestClassRB);
setSelectedIfNeeded(testType == TestType.TEST_METHOD, myTestMethodRB);
setSelectedIfNeeded(testType == TestType.TEST_FUNCTION, myTestFunctionRB);
myTestFolderComponent.setVisible(testType == TestType.TEST_FOLDER);
myTestFolderTextField.setVisible(testType == TestType.TEST_FOLDER);
myTestScriptComponent.setVisible(testType != TestType.TEST_FOLDER);
myTestScriptTextField.setVisible(testType != TestType.TEST_FOLDER);
myTestClassComponent.setVisible(testType == TestType.TEST_CLASS || testType == TestType.TEST_METHOD);
myTestClassTextField.setVisible(testType == TestType.TEST_CLASS || testType == TestType.TEST_METHOD);
myTestMethodComponent.setVisible(testType == TestType.TEST_METHOD || testType == TestType.TEST_FUNCTION);
myTestMethodTextField.setVisible(testType == TestType.TEST_METHOD || testType == TestType.TEST_FUNCTION);
myPatternTextField.setEnabled(myPatternCheckBox.isSelected());
myParamTextField.setEnabled(myParamCheckBox.isSelected());
// The same labeled component doubles as "method" or "function" label.
myTestMethodComponent.getLabel().setText(testType == TestType.TEST_METHOD? PyBundle.message("runcfg.unittest.dlg.method_label")
: PyBundle.message("runcfg.unittest.dlg.function_label"));
}
public JComponent getPanel() {
return myRootPanel;
}
public JPanel getAdditionalPanel() {
return myAdditionalPanel;
}
public JPanel getTestsPanel() {
return myTestsPanel;
}
public JTextField getPatternComponent() {
return myPatternTextField;
}
@Override
public boolean usePattern() {
return myPatternCheckBox.isSelected();
}
@Override
public void usePattern(boolean usePattern) {
myPatternCheckBox.setSelected(usePattern);
}
public String getParams() {
return myParamTextField.getText().trim();
}
public JCheckBox getParamCheckBox() {
return myParamCheckBox;
}
public void setParams(String params) {
myParamTextField.setText(params);
}
// Reveals the extra-parameters controls hidden by the constructor.
public void setParamsVisible() {
myParamTextField.setVisible(true);
myParamCheckBox.setVisible(true);
}
}
|
package org.camunda.bpm;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;
import org.junit.Before;
import org.junit.Test;
import com.sun.jersey.api.client.ClientResponse;
/**
 * Integration test verifying the webapp serves a branded HTML error page
 * (rather than the container default) for unknown URLs.
 */
public class ErrorPageIT extends AbstractWebIntegrationTest {
@Before
public void createClient() throws Exception {
createClient(getWebappCtxPath());
}
@Test
public void shouldCheckNonFoundResponse() {
// when: requesting a path that does not exist in the webapp
ClientResponse response = client.resource(APP_BASE_PATH + "nonexisting")
.get(ClientResponse.class);
// then: 404 with an HTML body containing the branded "Not Found" page
assertEquals(Status.NOT_FOUND.getStatusCode(), response.getStatus());
assertTrue(response.getType().toString().startsWith(MediaType.TEXT_HTML));
String responseEntity = response.getEntity(String.class);
assertTrue(responseEntity.contains("Camunda"));
assertTrue(responseEntity.contains("Not Found"));
// cleanup
response.close();
}
}
|
package com.futuresimple.base.richedit.ui;
import com.futuresimple.base.richedit.text.EffectsHandler;
import com.futuresimple.base.richedit.text.HtmlParsingListener;
import com.futuresimple.base.richedit.text.style.BulletSpan;
import com.futuresimple.base.richedit.text.style.URLSpan;
import com.futuresimple.base.richedit.text.style.UnorderedListSpan;
import com.nostra13.universalimageloader.core.ImageLoader;
import com.nostra13.universalimageloader.core.assist.FailReason;
import com.nostra13.universalimageloader.core.assist.ImageSize;
import com.nostra13.universalimageloader.core.assist.ViewScaleType;
import com.nostra13.universalimageloader.core.imageaware.NonViewAware;
import com.nostra13.universalimageloader.core.listener.ImageLoadingListener;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Parcel;
import android.os.Parcelable;
import android.text.Spanned;
import android.text.TextUtils.TruncateAt;
import android.util.AttributeSet;
import android.view.View;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class CustomSpannableEditText extends FixedSelectionEditText implements ImageLoadingListener, HtmlParsingListener {
// Image URLs discovered during HTML parsing; loaded in onParsingFinished().
private final Set<String> mImagesToLoad = new HashSet<>();
// Most recent state produced by onSaveInstanceState(); nulled once restored.
private CustomSpansState mLastState = null;
// True after a successful onRestoreInstanceState() with our custom state.
private boolean mStateRestored;
public final CustomSpansState getLastState() {
return mLastState;
}
public final boolean isStateRestored() {
return mStateRestored;
}
// Standard Android View constructor triad; no initialization beyond the superclass.
public CustomSpannableEditText(final Context context) {
super(context);
}
public CustomSpannableEditText(final Context context, final AttributeSet attrs) {
super(context, attrs);
}
public CustomSpannableEditText(final Context context, final AttributeSet attrs, final int defStyle) {
super(context, attrs, defStyle);
}
/**
 * Re-attaches previously saved unordered-list spans (and their bullet items)
 * over the ranges recorded in the holders.
 */
private void applyBulletLists(final List<BulletListHolder> bulletListHolders) {
    for (final BulletListHolder listHolder : bulletListHolders) {
        final UnorderedListSpan listSpan = new UnorderedListSpan();
        getText().setSpan(listSpan, listHolder.getStart(), listHolder.getEnd(), Spanned.SPAN_PARAGRAPH);
        for (final BulletHolder itemHolder : listHolder.getBulletHolders()) {
            final BulletSpan item =
                    new BulletSpan(itemHolder.getRadius(), itemHolder.getGap(), itemHolder.getColor(), itemHolder.isWantColor());
            getText().setSpan(item, itemHolder.getStart(), itemHolder.getEnd(), Spanned.SPAN_PARAGRAPH);
            listSpan.addItem(item);
        }
    }
}
/**
 * Re-attaches previously saved URL spans over their recorded character ranges.
 */
private void applyLinks(final List<LinkHolder> linkHolders) {
    for (final LinkHolder holder : linkHolders) {
        final URLSpan restored = new URLSpan(holder.getLink(), holder.getTitle(), holder.getTarget());
        getText().setSpan(restored, holder.getStart(), holder.getEnd(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
    }
}
@Override
public void onRestoreInstanceState(final Parcelable state) {
// State from a plain EditText (no custom spans) is delegated untouched.
if (!(state instanceof CustomSpansState)) {
super.onRestoreInstanceState(state);
return;
}
final CustomSpansState customState = (CustomSpansState) state;
// NOTE(review): the super state is restored only while the text is still empty;
// presumably the text content is re-set elsewhere before spans are applied —
// verify against callers before changing.
if (getEditableText().length() == 0) {
super.onRestoreInstanceState(customState.getSuperState());
}
applyBulletLists(customState.getBulletListHolders());
applyLinks(customState.getLinkHolders());
mLastState = null;
mStateRestored = true;
}
/**
 * Snapshots every unordered-list span (and its bullet items) into plain holders,
 * stripping the spans from the text as it goes.
 *
 * @return one holder per removed list, preserving the recorded span ranges
 */
private List<BulletListHolder> removeAllBulletLists() {
    final List<BulletListHolder> holders = new ArrayList<>();
    final UnorderedListSpan[] listSpans = getText().getSpans(0, getText().length(), UnorderedListSpan.class);
    for (final UnorderedListSpan listSpan : listSpans) {
        final BulletListHolder holder =
                new BulletListHolder(getText().getSpanStart(listSpan), getText().getSpanEnd(listSpan));
        for (final BulletSpan item : listSpan.getItems()) {
            holder.addBulletHolder(new BulletHolder(
                    item.getBulletRadius(),
                    item.getGapWidth(),
                    item.getColor(),
                    item.isWantColor(),
                    getText().getSpanStart(item),
                    getText().getSpanEnd(item)));
            getText().removeSpan(item);
        }
        holders.add(holder);
        getText().removeSpan(listSpan);
    }
    return holders;
}
/**
 * Converts every URLSpan into a parcelable LinkHolder and strips it from the text.
 *
 * @return one holder per removed link, preserving the recorded span ranges
 */
private List<LinkHolder> removeAllLinks() {
    final List<LinkHolder> holders = new ArrayList<>();
    for (final URLSpan link : getText().getSpans(0, getText().length(), URLSpan.class)) {
        holders.add(new LinkHolder(link.getUrl(), link.getTitle(), link.getTarget(),
                getText().getSpanStart(link), getText().getSpanEnd(link)));
        getText().removeSpan(link);
    }
    return holders;
}
@Override
public Parcelable onSaveInstanceState() {
// Custom spans must be stripped BEFORE super.onSaveInstanceState() runs, so the
// superclass does not attempt to parcel span classes it cannot recreate.
// do not call removeAllLinks() inline!!!
// All "removing" operations have to be done before calling super.onSaveInstanceState() !
final List<BulletListHolder> bulletListHolders = removeAllBulletLists();
final List<LinkHolder> linkHolders = removeAllLinks();
mStateRestored = false;
mLastState = new CustomSpansState(super.onSaveInstanceState(), bulletListHolders, linkHolders);
return mLastState;
}
@Override
public final void onLoadingStarted(final String source, final View view) {
// nothing to do
}
@Override
public final void onLoadingFailed(final String source, final View view, final FailReason failReason) {
// Swap in the "failed" placeholder for this image and force a relayout.
EffectsHandler.applyImageLoadingFailedImageSpan(getText(), getResources(), source);
nullLayouts();
}
@Override
public final void onLoadingComplete(final String source, final View view, final Bitmap bitmap) {
// Replace the placeholder with the loaded bitmap, scaled to the view width.
final Drawable drawable = new BitmapDrawable(getResources(), bitmap);
EffectsHandler.applyLoadedImageSpan(getText(), source, getMeasuredWidth(), drawable);
nullLayouts();
}
@Override
public final void onLoadingCancelled(final String source, final View view) {
// Cancelled loads are rendered the same as failures.
EffectsHandler.applyImageLoadingFailedImageSpan(getText(), getResources(), source);
nullLayouts();
}
private void nullLayouts() {
//This is ugly hack to call private method "nullLayouts" from TextView:
//toggling the ellipsize setting forces TextView to discard and rebuild its
//layout so newly applied image spans get measured.
setEllipsize(TruncateAt.END);
setEllipsize(null);
}
@Override
public final void onImageFound(final String source) {
// Collect image URLs while HTML is parsed; loading is deferred to onParsingFinished().
mImagesToLoad.add(source);
}
@Override
public final void onParsingFinished() {
// Kick off asynchronous loading for every collected image; callbacks above
// swap the placeholders once each load finishes.
for (final String imageUri : mImagesToLoad) {
// 0x0 size means unknown
// also! do not pass uri parameter below (when creating NonViewAware object)
final NonViewAware nonViewAware = new NonViewAware(new ImageSize(0, 0), ViewScaleType.CROP);
ImageLoader.getInstance().displayImage(imageUri, nonViewAware, this);
}
mImagesToLoad.clear();
}
static final class CustomSpansState extends BaseSavedState {
private final List<BulletListHolder> mBulletListHolders = new ArrayList<>();
private final List<LinkHolder> mLinkHolders = new ArrayList<>();
// Recreates the state from a Parcel; read order must match writeBulletLists()/writeLinks().
public CustomSpansState(final Parcel source) {
super(source);
readBulletLists(source);
readLinks(source);
}
// Wraps the superclass state together with the stripped span holders;
// null holder lists are treated as empty.
public CustomSpansState(final Parcelable superState, final List<BulletListHolder> bulletListHolders, final List<LinkHolder> linkHolders) {
super(superState);
if (bulletListHolders != null) {
mBulletListHolders.addAll(bulletListHolders);
}
if (linkHolders != null) {
mLinkHolders.addAll(linkHolders);
}
}
private void readBulletLists(final Parcel source) {
final int bulletListsCount = source.readInt();
if (bulletListsCount > 0) {
for (int i = 0; i < bulletListsCount; i++) {
final BulletListHolder bulletListHolder = new BulletListHolder(source.readInt(), source.readInt());
final int bulletsCount = source.readInt();
if (bulletsCount > 0) {
for (int j = 0; j < bulletsCount; j++) {
bulletListHolder.addBulletHolder(new BulletHolder(
source.readInt(),
source.readInt(),
source.readInt(),
source.readInt() != 0,
source.readInt(),
source.readInt()
));
}
}
mBulletListHolders.add(bulletListHolder);
}
}
}
private void readLinks(final Parcel source) {
final int linksCount = source.readInt();
if (linksCount > 0) {
for (int i = 0; i < linksCount; i++) {
mLinkHolders.add(new LinkHolder(
source.readString(),
source.readString(),
source.readString(),
source.readInt(),
source.readInt()
));
}
}
}
public final List<BulletListHolder> getBulletListHolders() {
return mBulletListHolders;
}
public final List<LinkHolder> getLinkHolders() {
return mLinkHolders;
}
private void writeBulletLists(final Parcel destination) {
destination.writeInt(mBulletListHolders.size());
for (final BulletListHolder bulletListHolder : mBulletListHolders) {
destination.writeInt(bulletListHolder.getStart());
destination.writeInt(bulletListHolder.getEnd());
destination.writeInt(bulletListHolder.getBulletHolders().size());
for (final BulletHolder bulletHolder : bulletListHolder.getBulletHolders()) {
destination.writeInt(bulletHolder.getRadius());
destination.writeInt(bulletHolder.getGap());
destination.writeInt(bulletHolder.getColor());
destination.writeInt(bulletHolder.isWantColor() ? 1 : 0);
destination.writeInt(bulletHolder.getStart());
destination.writeInt(bulletHolder.getEnd());
}
}
}
private void writeLinks(final Parcel destination) {
destination.writeInt(mLinkHolders.size());
for (final LinkHolder linkHolder : mLinkHolders) {
destination.writeString(linkHolder.getLink());
destination.writeString(linkHolder.getTitle());
destination.writeString(linkHolder.getTarget());
destination.writeInt(linkHolder.getStart());
destination.writeInt(linkHolder.getEnd());
}
}
@Override
public void writeToParcel(final Parcel destination, final int flags) {
super.writeToParcel(destination, flags);
writeBulletLists(destination);
writeLinks(destination);
}
public static final Parcelable.Creator<CustomSpansState> CREATOR =
new Parcelable.Creator<CustomSpansState>() {
@Override
public final CustomSpansState createFromParcel(final Parcel source) {
return new CustomSpansState(source);
}
@Override
public final CustomSpansState[] newArray(final int size) {
return new CustomSpansState[size];
}
};
}
/**
 * Base holder recording the start/end character offsets a span covers.
 * Subclasses add the span-type-specific payload (link data, bullet data).
 */
static class SpanHolder {
    private final int mStart;
    private final int mEnd;

    public SpanHolder(final int start, final int end) {
        mStart = start;
        mEnd = end;
    }

    public final int getStart() {
        return mStart;
    }

    public final int getEnd() {
        return mEnd;
    }

    /** Length of the covered range (end minus start). */
    public final int getSize() {
        return getEnd() - getStart();
    }
}
/**
 * Holder for a single link span: destination url, title and target,
 * plus the covered character range inherited from {@link SpanHolder}.
 */
static final class LinkHolder extends SpanHolder {
    private final String mLink;
    private final String mTitle;
    private final String mTarget;

    public LinkHolder(final String link, final String title, final String target, final int start, final int end) {
        super(start, end);
        mLink = link;
        mTitle = title;
        mTarget = target;
    }

    public final String getLink() {
        return mLink;
    }

    public final String getTitle() {
        return mTitle;
    }

    public final String getTarget() {
        return mTarget;
    }
}
/**
 * Holder for a single bullet span: geometry (radius, gap), colour and a
 * flag saying whether the colour should actually be applied, plus the
 * covered character range inherited from {@link SpanHolder}.
 */
static final class BulletHolder extends SpanHolder {
    // Declared in constructor-parameter order for readability.
    private final int mRadius;
    private final int mGap;
    private final int mColor;
    private final boolean mWantColor;

    public BulletHolder(final int radius, final int gap, final int color, final boolean wantColor, final int start, final int end) {
        super(start, end);
        mRadius = radius;
        mGap = gap;
        mColor = color;
        mWantColor = wantColor;
    }

    public final int getRadius() {
        return mRadius;
    }

    public final int getColor() {
        return mColor;
    }

    public final int getGap() {
        return mGap;
    }

    public final boolean isWantColor() {
        return mWantColor;
    }
}
/**
 * Holder for a bullet-list span: the range it covers (inherited from
 * {@link SpanHolder}) and the individual bullets inside it.
 */
static final class BulletListHolder extends SpanHolder {
    // Made final and params marked final, matching the conventions used by
    // every sibling holder class in this file.
    private final List<BulletHolder> mBulletHolders = new ArrayList<>();

    public BulletListHolder(final int start, final int end) {
        super(start, end);
    }

    /** Adds a bullet to this list; silently ignores {@code null}. */
    public final void addBulletHolder(final BulletHolder bulletHolder) {
        if (bulletHolder != null) {
            mBulletHolders.add(bulletHolder);
        }
    }

    public final List<BulletHolder> getBulletHolders() {
        return mBulletHolders;
    }
}
}
|
package org.tanaguru.rules.rgaa30;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.jsoup.nodes.Element;
import org.tanaguru.entity.audit.TestSolution;
import org.tanaguru.entity.reference.Nomenclature;
import org.tanaguru.processor.SSPHandler;
import org.tanaguru.ruleimplementation.AbstractPageRuleMarkupImplementation;
import org.tanaguru.ruleimplementation.ElementHandler;
import org.tanaguru.ruleimplementation.ElementHandlerImpl;
import org.tanaguru.ruleimplementation.TestSolutionHandler;
import org.tanaguru.rules.elementchecker.ElementChecker;
import org.tanaguru.rules.elementchecker.element.ElementWithAttributePresenceChecker;
import org.tanaguru.rules.elementselector.SimpleElementSelector;
import org.tanaguru.rules.elementselector.builder.CssLikeSelectorBuilder;
import static org.tanaguru.rules.keystore.AttributeStore.HEIGHT_ATTR;
import static org.tanaguru.rules.keystore.AttributeStore.WIDTH_ATTR;
import static org.tanaguru.rules.keystore.CssLikeQueryStore.ELEMENT_WITH_HEIGHT_ATTR_NOT_IMG_V2;
import static org.tanaguru.rules.keystore.CssLikeQueryStore.ELEMENT_WITH_WITDH_ATTR_NOT_IMG_V2;
import static org.tanaguru.rules.keystore.CssLikeQueryStore.ELEMENT_WITH_HEIGHT_ATTR_NOT_IMG;
import static org.tanaguru.rules.keystore.CssLikeQueryStore.ELEMENT_WITH_WITDH_ATTR_NOT_IMG;
import org.tanaguru.rules.keystore.HtmlElementStore;
import static org.tanaguru.rules.keystore.RemarkMessageStore.PRESENTATION_ATTR_DETECTED_MSG;
/**
 * Rgaa 3.0 rule 10.1.2 implementation: flags elements that still carry a
 * deprecated presentation attribute (from the nomenclature) or a
 * width/height attribute on a non-img element, excluding svg content.
 */
public class Rgaa30Rule100102 extends AbstractPageRuleMarkupImplementation {

    /** Nomenclature code listing the deprecated presentation attributes. */
    private static final String PRESENTATION_ATTR_NOM =
            "DeprecatedRepresentationAttributes";

    /**
     * Maps each checked attribute name to the handler holding the elements
     * selected for it. LinkedHashMap keeps the check order deterministic.
     */
    private final Map<String, ElementHandler> attrElementHandlerMap = new LinkedHashMap<>();

    /* the total number of elements */
    int totalNumberOfElements = 0;

    /**
     * Default constructor
     */
    public Rgaa30Rule100102 () {
        super();
    }

    @Override
    protected void select(SSPHandler sspHandler) {
        totalNumberOfElements = sspHandler.getTotalNumberOfElements();
        // retrieve the deprecated presentation attributes from the nomenclature
        Nomenclature deprecatedHtmlAttr = nomenclatureLoaderService.
                loadByCode(PRESENTATION_ATTR_NOM);
        for (String deprecatedAttr : deprecatedHtmlAttr.getValueList()) {
            SimpleElementSelector sec =
                    new SimpleElementSelector(buildQuery(deprecatedAttr));
            ElementHandler<Element> eh = new ElementHandlerImpl();
            sec.selectElements(sspHandler, eh);
            attrElementHandlerMap.put(deprecatedAttr, eh);
        }
        // elements with a width attribute that are not img
        SimpleElementSelector secWidthAttrNotImg =
                new SimpleElementSelector(ELEMENT_WITH_WITDH_ATTR_NOT_IMG_V2);
        ElementHandler<Element> ehWithAttrNotImg = new ElementHandlerImpl();
        secWidthAttrNotImg.selectElements(sspHandler, ehWithAttrNotImg);
        attrElementHandlerMap.put(WIDTH_ATTR, ehWithAttrNotImg);
        // elements with a height attribute that are not img
        // (comment fixed: was copy-pasted as "width")
        SimpleElementSelector secHeightAttrNotImg =
                new SimpleElementSelector(ELEMENT_WITH_HEIGHT_ATTR_NOT_IMG_V2);
        ElementHandler<Element> ehHeightAttrNotImg = new ElementHandlerImpl();
        secHeightAttrNotImg.selectElements(sspHandler, ehHeightAttrNotImg);
        attrElementHandlerMap.put(HEIGHT_ATTR, ehHeightAttrNotImg);
    }

    @Override
    protected void check(SSPHandler sspHandler, TestSolutionHandler testSolutionHandler) {
        // One attribute-presence check per collected attribute: elements still
        // carrying the attribute fail, the others pass.
        for (Map.Entry<String, ElementHandler> attrElementHandlerMapEntry :
                attrElementHandlerMap.entrySet()) {
            ElementChecker attrEc = new ElementWithAttributePresenceChecker(
                    // diamond operator instead of raw ImmutablePair instantiation
                    new ImmutablePair<>(TestSolution.FAILED, PRESENTATION_ATTR_DETECTED_MSG),
                    new ImmutablePair<>(TestSolution.PASSED, ""),
                    attrElementHandlerMapEntry.getKey()
            );
            attrEc.check(
                    sspHandler,
                    attrElementHandlerMapEntry.getValue(),
                    testSolutionHandler);
        }
    }

    @Override
    public int getSelectionSize() {
        return totalNumberOfElements;
    }

    /**
     * Builds the selection query for one attribute.
     *
     * @param attributeName the attribute to look for
     * @return the query selecting elements carrying attributeName while
     * excluding svg elements themselves and elements whose parent is svg
     */
    private String buildQuery(String attributeName) {
        StringBuilder strb = new StringBuilder();
        strb.append(CssLikeSelectorBuilder.
                buildSelectorFromElementDifferentFromAndAttribute(
                        HtmlElementStore.SVG_ELEMENT,
                        attributeName));
        strb.append(CssLikeSelectorBuilder.
                buildSelectorFromAttributeAndParentDifferentFrom(
                        HtmlElementStore.SVG_ELEMENT,
                        attributeName));
        return strb.toString();
    }
}
|
package net.runelite.client.game;
import lombok.Getter;
import static net.runelite.api.NullObjectID.*;
import static net.runelite.api.ObjectID.*;
import net.runelite.api.coords.WorldPoint;
@Getter
public enum AgilityShortcut
{
GENERIC_SHORTCUT(1, "Shortcut", null,
// Trollheim
ROCKS_3790, ROCKS_3791,
// Fremennik Slayer Cave
STEPS_29993,
// Fossil Island
LADDER_30938, LADDER_30939, LADDER_30940, LADDER_30941, RUBBER_CAP_MUSHROOM,
// Brimhaven dungeon
CREVICE_30198,
// Lumbridge
STILE_12982,
// Gu'Tanoth Bridge
GAP, GAP_2831,
// Lumbridge Swamp Caves
STEPPING_STONE_5948, STEPPING_STONE_5949, ROCKS_6673,
// Morytania Pirate Ship
ROCK_16115,
// Lumber Yard
BROKEN_FENCE_2618,
// McGrubor's Wood
LOOSE_RAILING,
// Underwater Area Fossil Island
TUNNEL_30959, HOLE_30966, OBSTACLE, OBSTACLE_30767, OBSTACLE_30964, OBSTACLE_30962, PLANT_DOOR_30961,
// Tree Gnome Village
LOOSE_RAILING_2186,
// Burgh de Rott
LOW_FENCE,
// Taverley
STILE,
// Asgarnian Ice Dungeon
STEPS,
// Fossil Island Wyvern Cave
STAIRS_31485,
// Trollweiss Mountain Cave
ROCKY_HANDHOLDS, ROCKY_HANDHOLDS_19847,
// Witchaven Dungeon
SHORTCUT),
BRIMHAVEN_DUNGEON_MEDIUM_PIPE_RETURN(1, "Pipe Squeeze", null, new WorldPoint(2698, 9491, 0), PIPE_21727),
BRIMHAVEN_DUNGEON_PIPE_RETURN(1, "Pipe Squeeze", null, new WorldPoint(2655, 9573, 0), PIPE_21728),
BRIMHAVEN_DUNGEON_STEPPING_STONES_RETURN(1, "Pipe Squeeze", null, STEPPING_STONE_21739),
BRIMHAVEN_DUNGEON_LOG_BALANCE_RETURN(1, "Log Balance", null, LOG_BALANCE_20884),
AGILITY_PYRAMID_ROCKS_WEST(1, "Rocks", null, CLIMBING_ROCKS_11948),
CAIRN_ISLE_CLIMBING_ROCKS(1, "Rocks", null, CLIMBING_ROCKS),
KARAMJA_GLIDER_LOG(1, "Log Balance", new WorldPoint(2906, 3050, 0), A_WOODEN_LOG ),
FALADOR_CRUMBLING_WALL(5, "Crumbling Wall", new WorldPoint(2936, 3357, 0), CRUMBLING_WALL_24222 ),
RIVER_LUM_GRAPPLE_WEST(8, "Grapple Broken Raft", new WorldPoint(3245, 3179, 0), BROKEN_RAFT),
RIVER_LUM_GRAPPLE_EAST(8, "Grapple Broken Raft", new WorldPoint(3258, 3179, 0), BROKEN_RAFT),
CORSAIR_COVE_ROCKS(10, "Rocks", new WorldPoint(2545, 2871, 0), ROCKS_31757),
KARAMJA_MOSS_GIANT_SWING(10, "Rope", null, ROPESWING_23568, ROPESWING_23569),
FALADOR_GRAPPLE_WALL(11, "Grapple Wall", new WorldPoint(3031, 3391, 0), WALL_17049, WALL_17050),
BRIMHAVEN_DUNGEON_STEPPING_STONES(12, "Stepping Stones", null, STEPPING_STONE_21738),
VARROCK_SOUTH_FENCE(13, "Fence", new WorldPoint(3239, 3334, 0), FENCE_16518),
GOBLIN_VILLAGE_WALL(14, "Wall", new WorldPoint(2925, 3523, 0), TIGHTGAP),
CORSAIR_COVE_DUNGEON_PILLAR(15, "Pillar Jump", new WorldPoint(1980, 8996, 0), PILLAR_31809),
EDGEVILLE_DUNGEON_MONKEYBARS(15, "Monkey Bars", null, MONKEYBARS_23566),
TROLLHEIM_ROCKS(15, "Rocks", null, new WorldPoint(2838, 3614, 0), ROCKS_3748), // No fixed world map location, but rocks near death plateau have a requirement of 15
YANILLE_UNDERWALL_TUNNEL(16, "Underwall Tunnel", new WorldPoint(2574, 3109, 0), HOLE_16520, CASTLE_WALL),
KOUREND_CATACOMBS_SOUTH_WEST_CREVICE_NORTH(17, "Crevice", new WorldPoint(1647, 10008, 0), CRACK_28892),
KOUREND_CATACOMBS_SOUTH_WEST_CREVICE_SOUTH(17, "Crevice", new WorldPoint(1645, 10001, 0), CRACK_28892),
YANILLE_WATCHTOWER_TRELLIS(18, "Trellis", null, TRELLIS_20056),
COAL_TRUCKS_LOG_BALANCE(20, "Log Balance", new WorldPoint(2598, 3475, 0), LOG_BALANCE_23274),
GRAND_EXCHANGE_UNDERWALL_TUNNEL(21, "Underwall Tunnel", new WorldPoint(3139, 3515, 0), UNDERWALL_TUNNEL_16529, UNDERWALL_TUNNEL_16530),
BRIMHAVEN_DUNGEON_PIPE(22, "Pipe Squeeze", new WorldPoint(2654, 9569, 0), PIPE_21728),
OBSERVATORY_SCALE_CLIFF(23, "Grapple Rocks", new WorldPoint(2447, 3155, 0), NULL_31849),
EAGLES_PEAK_ROCK_CLIMB(25, "Rock Climb", new WorldPoint(2320, 3499, 0), ROCKS_19849),
FALADOR_UNDERWALL_TUNNEL(26, "Underwall Tunnel", new WorldPoint(2947, 3313, 0), UNDERWALL_TUNNEL, UNDERWALL_TUNNEL_16528),
KOUREND_CATACOMBS_PILLAR_JUMP_NORTH(28, "Pillar Jump", new WorldPoint(1613, 10071, 0)),
KOUREND_CATACOMBS_PILLAR_JUMP_SOUTH(28, "Pillar Jump", new WorldPoint(1609, 10060, 0)),
MOUNT_KARUULM_LOWER(29, "Rocks", new WorldPoint(1324, 3782, 0), ROCKS_34397),
CORSAIR_COVE_RESOURCE_ROCKS(30, "Rocks", new WorldPoint(2486, 2898, 0), ROCKS_31758, ROCKS_31759),
SOUTHEAST_KARAJMA_STEPPING_STONES(30, "Stepping Stones", new WorldPoint(2924, 2946, 0), STEPPING_STONES, STEPPING_STONES_23646, STEPPING_STONES_23647),
BRIMHAVEN_DUNGEON_LOG_BALANCE(30, "Log Balance", null, LOG_BALANCE_20882),
AGILITY_PYRAMID_ROCKS_EAST(30, "Rocks", null, CLIMBING_ROCKS_11949),
DRAYNOR_MANOR_STEPPING_STONES(31, "Stepping Stones", new WorldPoint(3150, 3362, 0), STEPPING_STONE_16533),
CATHERBY_CLIFFSIDE_GRAPPLE(32, "Grapple Rock", new WorldPoint(2868, 3429, 0), ROCKS_17042),
CAIRN_ISLE_ROCKS(32, "Rocks", null, ROCKS_2231),
ARDOUGNE_LOG_BALANCE(33, "Log Balance", new WorldPoint(2602, 3336, 0), LOG_BALANCE_16546, LOG_BALANCE_16547, LOG_BALANCE_16548),
BRIMHAVEN_DUNGEON_MEDIUM_PIPE(34, "Pipe Squeeze", null, new WorldPoint(2698, 9501, 0), PIPE_21727),
KOUREND_CATACOMBS_NORTH_EAST_CREVICE_NORTH(34, "Crevice", new WorldPoint(1715, 10057, 0), CRACK_28892),
KOUREND_CATACOMBS_NORTH_EAST_CREVICE_SOUTH(34, "Crevice", new WorldPoint(1705, 10077, 0), CRACK_28892),
CATHERBY_OBELISK_GRAPPLE(36, "Grapple Rock", new WorldPoint(2841, 3434, 0), CROSSBOW_TREE_17062),
GNOME_STRONGHOLD_ROCKS(37, "Rocks", new WorldPoint(2485, 3515, 0), ROCKS_16534, ROCKS_16535),
AL_KHARID_MINING_PITCLIFF_SCRAMBLE(38, "Rocks", new WorldPoint(3305, 3315, 0), ROCKS_16549, ROCKS_16550),
YANILLE_WALL_GRAPPLE(39, "Grapple Wall", new WorldPoint(2552, 3072, 0), WALL_17047),
NEITIZNOT_BRIDGE_REPAIR(40, "Bridge Repair - Quest", new WorldPoint(2315, 3828, 0), ROPE_BRIDGE_21306, ROPE_BRIDGE_21307),
NEITIZNOT_BRIDGE_SOUTHEAST(40, "Rope Bridge", null, ROPE_BRIDGE_21308, ROPE_BRIDGE_21309),
NEITIZNOT_BRIDGE_NORTHWEST(40, "Rope Bridge", null, ROPE_BRIDGE_21310, ROPE_BRIDGE_21311),
NEITIZNOT_BRIDGE_NORTH(40, "Rope Bridge", null, ROPE_BRIDGE_21312, ROPE_BRIDGE_21313),
NEITIZNOT_BRIDGE_NORTHEAST(40, "Broken Rope bridge", null, ROPE_BRIDGE_21314, ROPE_BRIDGE_21315),
KOUREND_LAKE_JUMP_EAST(40, "Stepping Stones", new WorldPoint(1612, 3570, 0), STEPPING_STONE_29729, STEPPING_STONE_29730),
KOUREND_LAKE_JUMP_WEST(40, "Stepping Stones", new WorldPoint(1604, 3572, 0), STEPPING_STONE_29729, STEPPING_STONE_29730),
YANILLE_DUNGEON_BALANCE(40, "Balancing Ledge", null, BALANCING_LEDGE_23548),
TROLLHEIM_EASY_CLIFF_SCRAMBLE(41, "Rocks", new WorldPoint(2869, 3670, 0), ROCKS_16521),
DWARVEN_MINE_NARROW_CREVICE(42, "Narrow Crevice", new WorldPoint(3034, 9806, 0), CREVICE_16543),
DRAYNOR_UNDERWALL_TUNNEL(42, "Underwall Tunnel", new WorldPoint(3068, 3261, 0), UNDERWALL_TUNNEL_19032, UNDERWALL_TUNNEL_19036),
TROLLHEIM_MEDIUM_CLIFF_SCRAMBLE_NORTH(43, "Rocks", new WorldPoint(2886, 3684, 0), ROCKS_3803, ROCKS_3804, ROCKS_16522),
TROLLHEIM_MEDIUM_CLIFF_SCRAMBLE_SOUTH(43, "Rocks", new WorldPoint(2876, 3666, 0), ROCKS_3803, ROCKS_3804, ROCKS_16522),
TROLLHEIM_ADVANCED_CLIFF_SCRAMBLE(44, "Rocks", new WorldPoint(2907, 3686, 0), ROCKS_16523, ROCKS_3748),
KOUREND_RIVER_STEPPING_STONES(45, "Stepping Stones", new WorldPoint(1720, 3551, 0), STEPPING_STONE_29728),
TIRANNWN_LOG_BALANCE(45, "Log Balance", null, LOG_BALANCE_3933, LOG_BALANCE_3931, LOG_BALANCE_3930, LOG_BALANCE_3929, LOG_BALANCE_3932),
COSMIC_ALTAR_MEDIUM_WALKWAY(46, "Narrow Walkway", new WorldPoint(2399, 4403, 0), JUTTING_WALL_17002),
DEEP_WILDERNESS_DUNGEON_CREVICE_NORTH(46, "Narrow Crevice", new WorldPoint(3047, 10335, 0), CREVICE_19043),
DEEP_WILDERNESS_DUNGEON_CREVICE_SOUTH(46, "Narrow Crevice", new WorldPoint(3045, 10327, 0), CREVICE_19043),
TROLLHEIM_HARD_CLIFF_SCRAMBLE(47, "Rocks", new WorldPoint(2902, 3680, 0), ROCKS_16524),
FREMENNIK_LOG_BALANCE(48, "Log Balance", new WorldPoint(2721, 3591, 0), LOG_BALANCE_16540, LOG_BALANCE_16541, LOG_BALANCE_16542),
YANILLE_DUNGEON_PIPE_SQUEEZE(49, "Pipe Squeeze", null, OBSTACLE_PIPE_23140),
ARCEUUS_ESSENCE_MINE_BOULDER(49, "Boulder", new WorldPoint(1774, 3888, 0), BOULDER_27990),
MORYTANIA_STEPPING_STONE(50, "Stepping Stone", new WorldPoint(3418, 3326, 0), STEPPING_STONE_13504),
VARROCK_SEWERS_PIPE_SQUEEZE(51, "Pipe Squeeze", new WorldPoint(3152, 9905, 0), OBSTACLE_PIPE_16511),
ARCEUUS_ESSENCE_MINE_EAST_SCRAMBLE(52, "Rock Climb", new WorldPoint(1770, 3851, 0), ROCKS_27987, ROCKS_27988),
KARAMJA_VOLCANO_GRAPPLE_NORTH(53, "Grapple Rock", new WorldPoint(2873, 3143, 0), STRONG_TREE_17074),
KARAMJA_VOLCANO_GRAPPLE_SOUTH(53, "Grapple Rock", new WorldPoint(2874, 3128, 0), STRONG_TREE_17074),
MOTHERLODE_MINE_WALL_EAST(54, "Wall", new WorldPoint(3124, 9703, 0), DARK_TUNNEL_10047),
MOTHERLODE_MINE_WALL_WEST(54, "Wall", new WorldPoint(3118, 9702, 0), DARK_TUNNEL_10047),
MISCELLANIA_DOCK_STEPPING_STONE(55, "Stepping Stone", new WorldPoint(2572, 3862, 0), STEPPING_STONE_11768),
ISAFDAR_FOREST_OBSTACLES(56, "Trap", null, DENSE_FOREST_3938, DENSE_FOREST_3939, DENSE_FOREST_3998, DENSE_FOREST_3999, DENSE_FOREST, LEAVES, LEAVES_3924, LEAVES_3925, STICKS, TRIPWIRE, TRIPWIRE_3921),
RELEKKA_EAST_FENCE(57, "Fence", new WorldPoint(2688, 3697, 0), BROKEN_FENCE),
YANILLE_DUNGEON_MONKEY_BARS(57, "Monkey Bars", null, MONKEYBARS_23567),
PHASMATYS_ECTOPOOL_SHORTCUT(58, "Weathered Wall", null , WEATHERED_WALL, WEATHERED_WALL_16526),
ELVEN_OVERPASS_CLIFF_SCRAMBLE(59, "Rocks", new WorldPoint(2345, 3300, 0), ROCKS_16514, ROCKS_16515),
ELVEN_OVERPASS_CLIFF_SCRAMBLE_PRIFDDINAS(59, "Rocks", new WorldPoint(3369, 6052, 0), ROCKS_16514, ROCKS_16515),
WILDERNESS_GWD_CLIMB_EAST(60, "Rocks", new WorldPoint(2943, 3770, 0), ROCKY_HANDHOLDS_26400, ROCKY_HANDHOLDS_26401, ROCKY_HANDHOLDS_26402, ROCKY_HANDHOLDS_26404, ROCKY_HANDHOLDS_26405, ROCKY_HANDHOLDS_26406),
WILDERNESS_GWD_CLIMB_WEST(60, "Rocks", new WorldPoint(2928, 3760, 0), ROCKY_HANDHOLDS_26400, ROCKY_HANDHOLDS_26401, ROCKY_HANDHOLDS_26402, ROCKY_HANDHOLDS_26404, ROCKY_HANDHOLDS_26405, ROCKY_HANDHOLDS_26406),
MOS_LEHARMLESS_STEPPING_STONE(60, "Stepping Stone", new WorldPoint(3710, 2970, 0), STEPPING_STONE_19042),
WINTERTODT_GAP(60, "Gap", new WorldPoint(1629, 4023, 0), GAP_29326),
UNGAEL_ICE(60, "Ice Chunks", null, NULL_25337, NULL_29868, NULL_29869, NULL_29870, ICE_CHUNKS_31822, NULL_31823, ICE_CHUNKS_31990),
SLAYER_TOWER_MEDIUM_CHAIN_FIRST(61, "Spiked Chain (Floor 1)", new WorldPoint(3421, 3550, 0), SPIKEY_CHAIN),
SLAYER_TOWER_MEDIUM_CHAIN_SECOND(61, "Spiked Chain (Floor 2)", new WorldPoint(3420, 3551, 0), SPIKEY_CHAIN_16538),
SLAYER_DUNGEON_CREVICE(62, "Narrow Crevice", new WorldPoint(2729, 10008, 0), CREVICE_16539),
MOUNT_KARUULM_UPPER(62, "Rocks", new WorldPoint(1322, 3791, 0), ROCKS_34396),
TAVERLEY_DUNGEON_RAILING(63, "Loose Railing", new WorldPoint(2935, 9811, 0), LOOSE_RAILING_28849),
TROLLHEIM_WILDERNESS_ROCKS_EAST(64, "Rocks", new WorldPoint(2945, 3678, 0), ROCKS_16545),
TROLLHEIM_WILDERNESS_ROCKS_WEST(64, "Rocks", new WorldPoint(2917, 3672, 0), ROCKS_16545),
FOSSIL_ISLAND_VOLCANO(64, "Rope", new WorldPoint(3780, 3822, 0), ROPE_ANCHOR, ROPE_ANCHOR_30917),
MORYTANIA_TEMPLE(65, "Loose Railing", new WorldPoint(3422, 3476, 0), ROCKS_16998, ROCKS_16999, ORNATE_RAILING, ORNATE_RAILING_17000),
REVENANT_CAVES_GREEN_DRAGONS(65, "Jump", new WorldPoint(3220, 10086, 0), PILLAR_31561),
COSMIC_ALTAR_ADVANCED_WALKWAY(66, "Narrow Walkway", new WorldPoint(2408, 4401, 0), JUTTING_WALL_17002),
LUMBRIDGE_DESERT_STEPPING_STONE(66, "Stepping Stone", new WorldPoint(3210, 3135, 0), STEPPING_STONE_16513),
HEROES_GUILD_TUNNEL_EAST(67, "Crevice", new WorldPoint(2898, 9901, 0), CREVICE_9739, CREVICE_9740),
HEROES_GUILD_TUNNEL_WEST(67, "Crevice", new WorldPoint(2913, 9895, 0), CREVICE_9739, CREVICE_9740),
YANILLE_DUNGEON_RUBBLE_CLIMB(67, "Pile of Rubble", null, PILE_OF_RUBBLE_23563, PILE_OF_RUBBLE_23564),
ELVEN_OVERPASS_MEDIUM_CLIFF(68, "Rocks", new WorldPoint(2337, 3288, 0), ROCKS_16514, ROCKS_16515),
ELVEN_OVERPASS_MEDIUM_CLIFF_PRIFDDINAS(68, "Rocks", new WorldPoint(3361, 6040, 0), ROCKS_16514, ROCKS_16515),
WEISS_OBSTACLES(68, "Shortcut", null, LITTLE_BOULDER, ROCKSLIDE_33184, ROCKSLIDE_33185, NULL_33327, NULL_33328, LEDGE_33190, ROCKSLIDE_33191, FALLEN_TREE_33192),
ARCEUUS_ESSENSE_NORTH(69, "Rock Climb", new WorldPoint(1759, 3873, 0), ROCKS_34741),
TAVERLEY_DUNGEON_PIPE_BLUE_DRAGON(70, "Pipe Squeeze", new WorldPoint(2886, 9798, 0), OBSTACLE_PIPE_16509),
TAVERLEY_DUNGEON_ROCKS_NORTH(70, "Rocks", new WorldPoint(2887, 9823, 0), ROCKS, ROCKS_14106),
TAVERLEY_DUNGEON_ROCKS_SOUTH(70, "Rocks", new WorldPoint(2887, 9631, 0), ROCKS, ROCKS_14106),
FOSSIL_ISLAND_HARDWOOD_NORTH(70, "Hole" , new WorldPoint(3712, 3828, 0), HOLE_31481, HOLE_31482),
FOSSIL_ISLAND_HARDWOOD_SOUTH(70, "Hole" , new WorldPoint(3714, 3816, 0), HOLE_31481, HOLE_31482),
AL_KHARID_WINDOW(70, "Window", new WorldPoint(3293, 3158, 0), BROKEN_WALL_33344, BIG_WINDOW),
GWD_SARADOMIN_ROPE_NORTH(70, "Rope Descent", new WorldPoint(2912, 5300, 0), NULL_26371, NULL_26561),
GWD_SARADOMIN_ROPE_SOUTH(70, "Rope Descent", new WorldPoint(2951, 5267, 0), NULL_26375, NULL_26562),
SLAYER_TOWER_ADVANCED_CHAIN_FIRST(71, "Spiked Chain (Floor 2)", new WorldPoint(3447, 3578, 0), SPIKEY_CHAIN ),
SLAYER_TOWER_ADVANCED_CHAIN_SECOND(71, "Spiked Chain (Floor 3)", new WorldPoint(3446, 3576, 0), SPIKEY_CHAIN_16538),
STRONGHOLD_SLAYER_CAVE_TUNNEL(72, "Tunnel", new WorldPoint(2431, 9806, 0), TUNNEL_30174, TUNNEL_30175),
TROLL_STRONGHOLD_WALL_CLIMB(73, "Rocks", new WorldPoint(2841, 3694, 0), ROCKS_16464),
ARCEUUS_ESSENSE_MINE_WEST(73, "Rock Climb", new WorldPoint(1742, 3853, 0), ROCKS_27984, ROCKS_27985 ),
LAVA_DRAGON_ISLE_JUMP(74, "Stepping Stone", new WorldPoint(3200, 3807, 0), STEPPING_STONE_14918),
FORTHOS_DUNGEON_SPIKED_BLADES(75, "Spiked Blades", new WorldPoint(1819, 9946, 0), STRANGE_FLOOR_34834),
REVENANT_CAVES_DEMONS_JUMP(75, "Jump", new WorldPoint(3199, 10135, 0), PILLAR_31561),
REVENANT_CAVES_ANKOU_EAST(75, "Jump", new WorldPoint(3201, 10195, 0), PILLAR_31561),
REVENANT_CAVES_ANKOU_NORTH(75, "Jump", new WorldPoint(3180, 10209, 0), PILLAR_31561),
ZUL_ANDRA_ISLAND_CROSSING(76, "Stepping Stone", new WorldPoint(2156, 3073, 0), STEPPING_STONE_10663),
SHILO_VILLAGE_STEPPING_STONES( 77, "Stepping Stones", new WorldPoint(2863, 2974, 0), STEPPING_STONE_16466),
IORWERTHS_DUNGEON_NORTHERN_SHORTCUT_EAST(78, "Tight Gap", new WorldPoint(3221, 12441, 0), TIGHT_GAP),
IORWERTHS_DUNGEON_NORTHERN_SHORTCUT_WEST(78, "Tight Gap", new WorldPoint(3215, 12441, 0), TIGHT_GAP_36693),
KHARAZI_JUNGLE_VINE_CLIMB(79, "Vine", new WorldPoint(2897, 2939, 0), NULL_26884, NULL_26886),
TAVERLEY_DUNGEON_SPIKED_BLADES(80, "Strange Floor", new WorldPoint(2877, 9813, 0), STRANGE_FLOOR),
SLAYER_DUNGEON_CHASM_JUMP(81, "Spiked Blades", new WorldPoint(2770, 10003, 0), STRANGE_FLOOR_16544),
LAVA_MAZE_NORTH_JUMP(82, "Stepping Stone", new WorldPoint(3092, 3880, 0), STEPPING_STONE_14917),
BRIMHAVEN_DUNGEON_EAST_STEPPING_STONES_NORTH(83, "Stepping Stones", new WorldPoint(2685, 9547, 0), STEPPING_STONE_19040),
BRIMHAVEN_DUNGEON_EAST_STEPPING_STONES_SOUTH(83, "Stepping Stones", new WorldPoint(2693, 9529, 0), STEPPING_STONE_19040),
IORWERTHS_DUNGEON_SOUTHERN_SHORTCUT_EAST(84, "Tight Gap", new WorldPoint(3241, 12420, 0), TIGHT_GAP_36694),
IORWERTHS_DUNGEON_SOUTHERN_SHORTCUT_WEST(84, "Tight Gap", new WorldPoint(3231, 12420, 0), TIGHT_GAP_36695),
ELVEN_ADVANCED_CLIFF_SCRAMBLE(85, "Rocks", new WorldPoint(2337, 3253, 0), ROCKS_16514, ROCKS_16515),
ELVEN_ADVANCED_CLIFF_SCRAMBLE_PRIFDDINAS(85, "Rocks", new WorldPoint(3361, 6005, 0), ROCKS_16514, ROCKS_16515),
KALPHITE_WALL(86, "Crevice", new WorldPoint(3214, 9508, 0), CREVICE_16465),
BRIMHAVEN_DUNGEON_VINE_EAST(87, "Vine", new WorldPoint(2672, 9582, 0), VINE_26880, VINE_26882),
BRIMHAVEN_DUNGEON_VINE_WEST(87, "Vine", new WorldPoint(2606, 9584, 0), VINE_26880, VINE_26882),
MOUNT_KARUULM_PIPE_SOUTH(88, "Pipe", new WorldPoint(1316, 10214, 0), MYSTERIOUS_PIPE),
MOUNT_KARUULM_PIPE_NORTH(88, "Pipe", new WorldPoint(1345, 10230, 0), MYSTERIOUS_PIPE),
REVENANT_CAVES_CHAMBER_JUMP(89, "Jump", new WorldPoint(3240, 10144, 0), PILLAR_31561);
/**
* The agility level required to pass the shortcut
*/
@Getter
private final int level;
/**
* Brief description of the shortcut (e.g. 'Rocks', 'Stepping Stones', 'Jump')
*/
@Getter
private final String description;
/**
* The location of the Shortcut icon on the world map (null if there is no icon)
*/
@Getter
private final WorldPoint worldMapLocation;
/**
* An optional location in case the location of the shortcut icon is either
* null or isn't close enough to the obstacle
*/
@Getter
private final WorldPoint worldLocation;
/**
* Array of obstacles, null objects, decorations etc. that this shortcut uses.
* Typically an ObjectID/NullObjectID
*/
@Getter
private final int[] obstacleIds;
AgilityShortcut(int level, String description, WorldPoint mapLocation, WorldPoint worldLocation, int... obstacleIds)
{
this.level = level;
this.description = description;
this.worldMapLocation = mapLocation;
this.worldLocation = worldLocation;
this.obstacleIds = obstacleIds;
}
AgilityShortcut(int level, String description, WorldPoint location, int... obstacleIds)
{
this(level, description, location, location, obstacleIds);
}
public String getTooltip()
{
return description + " - Level " + level;
}
}
|
package com.handmark.pulltorefresh.sample;
import java.util.Arrays;
import java.util.LinkedList;
import android.app.ListActivity;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import com.handmark.pulltorefresh.library.PullToRefreshBase.OnRefreshListener;
import com.handmark.pulltorefresh.library.PullToRefreshListView;
/**
 * Sample activity demonstrating PullToRefreshListView: pulling the list
 * triggers a simulated 4-second background refresh that prepends an item.
 */
public class PullToRefreshListActivity extends ListActivity {

    static final int MENU_MANUAL_REFRESH = 0;
    static final int MENU_DISABLE_SCROLL = 1;

    /** Demo data shared by the initial list content and each refresh. */
    private static final String[] mStrings = { "Abbaye de Belloc", "Abbaye du Mont des Cats", "Abertam", "Abondance", "Ackawi",
            "Acorn", "Adelost", "Affidelice au Chablis", "Afuega'l Pitu", "Airag", "Airedale", "Aisy Cendre",
            "Allgauer Emmentaler", "Abbaye de Belloc", "Abbaye du Mont des Cats", "Abertam", "Abondance", "Ackawi",
            "Acorn", "Adelost", "Affidelice au Chablis", "Afuega'l Pitu", "Airag", "Airedale", "Aisy Cendre",
            "Allgauer Emmentaler" };

    private LinkedList<String> mListItems;
    private PullToRefreshListView mPullRefreshListView;
    private ArrayAdapter<String> mAdapter;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.pull_to_refresh_list);

        mPullRefreshListView = (PullToRefreshListView) findViewById(R.id.pull_refresh_list);

        // Set a listener to be invoked when the list should be refreshed.
        mPullRefreshListView.setOnRefreshListener(new OnRefreshListener() {
            @Override
            public void onRefresh() {
                // Do work to refresh the list here.
                new GetDataTask().execute();
            }
        });

        ListView actualListView = mPullRefreshListView.getRefreshableView();

        mListItems = new LinkedList<String>();
        mListItems.addAll(Arrays.asList(mStrings));
        mAdapter = new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1, mListItems);

        // You can also just use setListAdapter(mAdapter)
        actualListView.setAdapter(mAdapter);
    }

    /** Simulated background refresh that returns the demo data after a delay. */
    private class GetDataTask extends AsyncTask<Void, Void, String[]> {

        @Override
        protected String[] doInBackground(Void... params) {
            // Simulates a background job.
            try {
                Thread.sleep(4000);
            } catch (InterruptedException e) {
                // Fix: previously swallowed silently. Restore the interrupt
                // flag so the interruption remains observable to callers.
                Thread.currentThread().interrupt();
            }
            return mStrings;
        }

        @Override
        protected void onPostExecute(String[] result) {
            mListItems.addFirst("Added after refresh...");
            mAdapter.notifyDataSetChanged();

            // Call onRefreshComplete when the list has been refreshed.
            mPullRefreshListView.onRefreshComplete();

            super.onPostExecute(result);
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        menu.add(0, MENU_MANUAL_REFRESH, 0, "Manual Refresh");
        menu.add(0, MENU_DISABLE_SCROLL, 1,
                mPullRefreshListView.isDisableScrollingWhileRefreshing() ? "Enable Scrolling while Refreshing"
                        : "Disable Scrolling while Refreshing");
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onPrepareOptionsMenu(Menu menu) {
        // Keep the toggle label in sync with the current scrolling setting.
        MenuItem disableItem = menu.findItem(MENU_DISABLE_SCROLL);
        disableItem
                .setTitle(mPullRefreshListView.isDisableScrollingWhileRefreshing() ? "Enable Scrolling while Refreshing"
                        : "Disable Scrolling while Refreshing");
        return super.onPrepareOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case MENU_MANUAL_REFRESH:
                new GetDataTask().execute();
                mPullRefreshListView.setRefreshing(false);
                break;
            case MENU_DISABLE_SCROLL:
                mPullRefreshListView.setDisableScrollingWhileRefreshing(!mPullRefreshListView
                        .isDisableScrollingWhileRefreshing());
                break;
        }
        return super.onOptionsItemSelected(item);
    }
}
|
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.jme3.gde.ogretools;
import com.jme3.asset.AssetKey;
import com.jme3.gde.core.assets.ProjectAssetManager;
import com.jme3.gde.core.assets.SpatialAssetDataObject;
import com.jme3.gde.ogretools.convert.OgreXMLConvert;
import com.jme3.gde.ogretools.convert.OgreXMLConvertOptions;
import com.jme3.scene.Spatial;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.progress.ProgressHandleFactory;
import org.openide.DialogDisplayer;
import org.openide.NotifyDescriptor;
import org.openide.filesystems.FileLock;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.loaders.DataObjectExistsException;
import org.openide.loaders.MultiFileLoader;
import org.openide.util.Exceptions;
/**
 * DataObject for binary Ogre meshes: converts the binary .mesh (and optional
 * .skeleton) to the intermediate xml form, loads the model through the
 * project's asset manager, then removes the transient xml artifacts.
 */
public class OgreBinaryMeshDataObject extends SpatialAssetDataObject {

    public OgreBinaryMeshDataObject(FileObject pf, MultiFileLoader loader) throws DataObjectExistsException, IOException {
        super(pf, loader);
    }

    /**
     * Converts the binary mesh to xml, loads it as a {@link Spatial} and
     * cleans up the intermediate file.
     *
     * @return the loaded model, or null if no ProjectAssetManager is
     *         available or loading fails
     */
    @Override
    public Spatial loadAsset() {
        ProgressHandle handle = ProgressHandleFactory.createHandle("Converting OgreBinary");
        handle.start();
        //mesh
        OgreXMLConvertOptions options = new OgreXMLConvertOptions(getPrimaryFile().getPath());
        options.setBinaryFile(true);
        OgreXMLConvert conv = new OgreXMLConvert();
        conv.doConvert(options, handle);
        //try skeleton
        if (getPrimaryFile().existsExt("skeleton")) {
            OgreXMLConvertOptions options2 = new OgreXMLConvertOptions(getPrimaryFile().getParent().getFileObject(getPrimaryFile().getName(), "skeleton").getPath());
            options2.setBinaryFile(true);
            OgreXMLConvert conv2 = new OgreXMLConvert();
            conv2.doConvert(options2, handle);
        }
        handle.progress("Convert Model");
        ProjectAssetManager mgr = getLookup().lookup(ProjectAssetManager.class);
        if (mgr == null) {
            DialogDisplayer.getDefault().notifyLater(new NotifyDescriptor.Message("File is not part of a project!\nCannot load without ProjectAssetManager."));
            // Fix: the progress handle was previously leaked on this path.
            handle.finish();
            return null;
        }
        String assetKey = mgr.getRelativeAssetPath(options.getDestFile());
        FileLock lock = null;
        try {
            lock = getPrimaryFile().lock();
            listListener.start();
            Spatial spatial = mgr.loadModel(assetKey);
            //replace transient xml files in list of assets for this model
            replaceXmlFiles();
            // NOTE(review): as in the original code, the listener is only
            // stopped on the success path - confirm whether it should also be
            // stopped when loadModel() throws.
            listListener.stop();
            savable = spatial;
            return spatial;
        } catch (IOException ex) {
            Exceptions.printStackTrace(ex);
            return null;
        } finally {
            // Consolidated cleanup (was duplicated on success and error paths).
            if (lock != null) {
                lock.releaseLock();
            }
            // The converted xml file is only an intermediate artifact.
            new File(options.getDestFile()).delete();
            handle.finish();
        }
    }

    /**
     * Replaces every transient xml entry in the asset list with its binary
     * counterpart (same base name, no extension-specific lookup).
     */
    private void replaceXmlFiles() {
        List<FileObject> newFiles = new ArrayList<FileObject>();
        for (Iterator<FileObject> it = assetList.iterator(); it.hasNext();) {
            FileObject fileObject = it.next();
            if (fileObject.hasExt("xml")) {
                FileObject binaryFile = fileObject.getParent().getFileObject(fileObject.getName());
                if (binaryFile != null) {
                    newFiles.add(binaryFile);
                    it.remove();
                }
            }
        }
        // Simplified: the old element-by-element copy loop was just addAll.
        assetList.addAll(newFiles);
    }
}
|
package com.smartdevicelink.proxy.rpc.enums;
/**
* The VR capabilities of the connected SDL platform.
*
*/
/**
 * The VR capabilities of the connected SDL platform.
 */
public enum VrCapabilities {
    /**
     * The SDL platform is capable of recognizing spoken text in the current
     * language.
     *
     * @since SmartDeviceLink 1.0
     * @deprecated use {@link #TEXT} instead.
     */
    @Deprecated
    Text,
    /**
     * The SDL platform is capable of recognizing spoken text in the current
     * language.
     *
     * @since SmartDeviceLink 3.0
     */
    TEXT,
    ;

    /**
     * Returns the enum constant matching the given string
     * (case-insensitively for "TEXT").
     *
     * @param value the string to match; may be null
     * @return the matching constant, or null when {@code value} is null or
     *         matches no constant (FIX: previously NPE'd on null and threw
     *         IllegalArgumentException on unknown values)
     */
    public static VrCapabilities valueForString(String value) {
        if (value == null) {
            return null;
        }
        if (value.equalsIgnoreCase(TEXT.toString())) {
            return TEXT;
        }
        try {
            return valueOf(value);
        } catch (IllegalArgumentException e) {
            return null;
        }
    }
}
|
package com.metamx.druid.master;
import com.metamx.druid.client.DataSegment;
import com.metamx.druid.client.DruidServer;
/**
 * Test double for {@link DruidMasterBalancer}: applies the same eligibility
 * checks as the production balancer but issues the load with a no-op
 * callback so tests can drive segment moves deterministically.
 */
public class DruidMasterBalancerTester extends DruidMasterBalancer
{
  public DruidMasterBalancerTester(DruidMaster master)
  {
    super(master);
  }

  /**
   * Moves {@code segment} to {@code toServer} when the target peon is not
   * already loading it, it is not already in flight, the target does not
   * already serve it, and the target has enough free space.
   */
  @Override
  protected void moveSegment(
      final BalancerSegmentHolder segment,
      final DruidServer toServer,
      final DruidMasterRuntimeParams params
  )
  {
    final String toServerName = toServer.getName();
    final LoadQueuePeon toPeon = params.getLoadManagementPeons().get(toServerName);
    final String fromServerName = segment.getFromServer().getName();
    final DataSegment segmentToMove = segment.getSegment();
    final String segmentName = segmentToMove.getIdentifier();

    if (!toPeon.getSegmentsToLoad().contains(segmentToMove) &&
        !currentlyMovingSegments.get("normal").containsKey(segmentName) &&
        !toServer.getSegments().containsKey(segmentName) &&
        new ServerHolder(toServer, toPeon).getAvailableSize() > segmentToMove.getSize()) {
      log.info(
          "Moving [%s] from [%s] to [%s]",
          segmentName,
          fromServerName,
          toServerName
      );
      try {
        // Reuse the peon looked up above instead of fetching it a second time.
        toPeon.loadSegment(segmentToMove, new LoadPeonCallback()
        {
          @Override
          protected void execute()
          {
          }
        });
        currentlyMovingSegments.get("normal").put(segmentName, segment);
      }
      catch (Exception e) {
        log.info(e, String.format("[%s] : Moving exception", segmentName));
      }
    } else {
      // FIX: the map is keyed by segment NAME; the original removed the
      // holder object, which could never match a key and left stale entries
      // in currentlyMovingSegments forever.
      currentlyMovingSegments.get("normal").remove(segmentName);
    }
  }
}
|
package com.intellij.openapi.roots.ui.configuration;
import com.intellij.compiler.Chunk;
import com.intellij.compiler.ModuleCompilerUtil;
import com.intellij.ide.util.projectWizard.AddModuleWizard;
import com.intellij.ide.util.projectWizard.ModuleBuilder;
import com.intellij.javaee.J2EEModuleUtil;
import com.intellij.javaee.module.J2EEModuleUtilEx;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.module.ModifiableModuleModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.project.ProjectBundle;
import com.intellij.openapi.roots.ModifiableRootModel;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.roots.ModuleRootModel;
import com.intellij.openapi.roots.impl.ProjectRootManagerImpl;
import com.intellij.openapi.roots.ui.configuration.actions.ModuleDeleteProvider;
import com.intellij.openapi.roots.ui.configuration.projectRoot.ProjectRootConfigurable;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.util.graph.CachingSemiGraph;
import com.intellij.util.graph.GraphGenerator;
import com.intellij.facet.impl.ProjectFacetsConfigurator;
import com.intellij.facet.impl.ui.ConfigureFacetsStep;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.util.*;
import java.util.List;
/**
 * Mediator between the project-structure UI and the IDE's module model.
 * Keeps one {@link ModuleEditor} per module, tracks the modified state of
 * the whole configuration, and commits or disposes every modifiable model
 * inside a single write action.
 */
public class ModulesConfigurator implements ModulesProvider, ModuleEditor.ChangeListener {
    private final Project myProject;
    private boolean myModified = false;
    private ProjectConfigurable myProjectConfigurable;
    private final List<ModuleEditor> myModuleEditors = new ArrayList<ModuleEditor>();
    // Orders editors alphabetically by their module; equals() deliberately
    // returns false so no two comparator instances compare equal.
    private final Comparator<ModuleEditor> myModuleEditorComparator = new Comparator<ModuleEditor>() {
        final ModulesAlphaComparator myModulesComparator = new ModulesAlphaComparator();
        public int compare(ModuleEditor editor1, ModuleEditor editor2) {
            return myModulesComparator.compare(editor1.getModule(), editor2.getModule());
        }
        @SuppressWarnings({"EqualsWhichDoesntCheckParameterClass"})
        public boolean equals(Object o) {
            return false;
        }
    };
    private ModifiableModuleModel myModuleModel;
    private ProjectFacetsConfigurator myFacetsConfigurator;

    public ModulesConfigurator(Project project, ProjectRootConfigurable configurable) {
        myProject = project;
        myModuleModel = ModuleManager.getInstance(myProject).getModifiableModel();
        myProjectConfigurable = new ProjectConfigurable(project, this, configurable.getProjectJdksModel());
        myFacetsConfigurator = new ProjectFacetsConfigurator();
    }

    public ProjectFacetsConfigurator getFacetsConfigurator() {
        return myFacetsConfigurator;
    }

    /** Disposes every editor's modifiable root model and the module model itself. */
    public void disposeUIResources() {
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
            public void run() {
                for (final ModuleEditor moduleEditor : myModuleEditors) {
                    final ModifiableRootModel model = moduleEditor.dispose();
                    if (model != null) {
                        model.dispose();
                    }
                }
                myModuleEditors.clear();
                myModuleModel.dispose();
            }
        });
    }

    public ProjectConfigurable getModulesConfigurable() {
        return myProjectConfigurable;
    }

    public Module[] getModules() {
        return myModuleModel.getModules();
    }

    /**
     * Finds a module by name, also checking pending renames.
     *
     * @param name the (possibly new) module name
     * @return the module, or null when no module has this name
     */
    @Nullable
    public Module getModule(String name) {
        final Module moduleByName = myModuleModel.findModuleByName(name);
        if (moduleByName != null) {
            return moduleByName;
        }
        return myModuleModel.getModuleToBeRenamed(name); //if module was renamed
    }

    /** @return the editor for {@code module}, or null when none is registered. */
    @Nullable
    public ModuleEditor getModuleEditor(Module module) {
        for (final ModuleEditor moduleEditor : myModuleEditors) {
            if (module.equals(moduleEditor.getModule())) {
                return moduleEditor;
            }
        }
        return null;
    }

    /**
     * Returns the module's root model: the editor's modifiable model when an
     * editor exists, falling back to the committed ModuleRootManager state.
     */
    public ModuleRootModel getRootModel(Module module) {
        final ModuleEditor editor = getModuleEditor(module);
        ModuleRootModel rootModel = null;
        if (editor != null) {
            rootModel = editor.getModifiableRootModel();
        }
        if (rootModel == null) {
            rootModel = ModuleRootManager.getInstance(module);
        }
        return rootModel;
    }

    /**
     * Discards all current editors and rebuilds one editor per module from a
     * fresh modifiable module model; resets the facets configurator and the
     * modified flag.
     */
    public void resetModuleEditors() {
        myModuleModel = ModuleManager.getInstance(myProject).getModifiableModel();
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
            public void run() {
                for (final ModuleEditor moduleEditor : myModuleEditors) {
                    moduleEditor.removeChangeListener(ModulesConfigurator.this);
                }
                myModuleEditors.clear();
                final Module[] modules = myModuleModel.getModules();
                if (modules.length > 0) {
                    for (Module module : modules) {
                        createModuleEditor(module, null, null);
                    }
                    Collections.sort(myModuleEditors, myModuleEditorComparator);
                }
            }
        });
        myFacetsConfigurator.reset();
        myModified = false;
    }

    /** Creates and registers an editor for {@code module}; registers facet editors when a wizard step is given. */
    private void createModuleEditor(final Module module, ModuleBuilder moduleBuilder, final @Nullable ConfigureFacetsStep facetsStep) {
        final ModuleEditor moduleEditor = new ModuleEditor(myProject, this, module.getName(), moduleBuilder);
        if (facetsStep != null) {
            myFacetsConfigurator.registerEditors(module, facetsStep);
        }
        myModuleEditors.add(moduleEditor);
        moduleEditor.addChangeListener(this);
    }

    public void moduleStateChanged(final ModifiableRootModel moduleRootModel) {
        myProjectConfigurable.updateCircularDependencyWarning();
    }

    /** Builds a dependency graph over the modifiable root models of all current editors. */
    public GraphGenerator<ModifiableRootModel> createGraphGenerator() {
        final Map<Module, ModifiableRootModel> models = new HashMap<Module, ModifiableRootModel>();
        for (ModuleEditor moduleEditor : myModuleEditors) {
            models.put(moduleEditor.getModule(), moduleEditor.getModifiableRootModel());
        }
        return createGraphGenerator(models);
    }

    // Graph over the given models; edges point from a model to the models of
    // the modules it depends on.
    private static GraphGenerator<ModifiableRootModel> createGraphGenerator(final Map<Module, ModifiableRootModel> models) {
        return GraphGenerator.create(CachingSemiGraph.create(new GraphGenerator.SemiGraph<ModifiableRootModel>() {
            public Collection<ModifiableRootModel> getNodes() {
                return models.values();
            }
            public Iterator<ModifiableRootModel> getIn(final ModifiableRootModel model) {
                final Module[] modules = model.getModuleDependencies();
                final List<ModifiableRootModel> dependencies = new ArrayList<ModifiableRootModel>();
                for (Module module : modules) {
                    dependencies.add(models.get(module));
                }
                return dependencies.iterator();
            }
        }));
    }

    /**
     * Commits all editors, facets and the module model in one write action.
     * On any outcome the configurator is re-initialized with fresh modifiable
     * models, and pending module-builder support is applied.
     *
     * @throws ConfigurationException when a commit step fails or the J2EE
     *         output-path consistency check does not pass
     */
    public void apply() throws ConfigurationException {
        final ProjectRootManagerImpl projectRootManager = ProjectRootManagerImpl.getInstanceImpl(myProject);
        final ConfigurationException[] ex = new ConfigurationException[1];
        ApplicationManager.getApplication().runWriteAction(new Runnable() {
            public void run() {
                try {
                    final List<ModifiableRootModel> models = new ArrayList<ModifiableRootModel>(myModuleEditors.size());
                    for (final ModuleEditor moduleEditor : myModuleEditors) {
                        final ModifiableRootModel model = moduleEditor.applyAndDispose();
                        if (model != null) {
                            models.add(model);
                        }
                    }
                    myFacetsConfigurator.applyAndDispose();
                    J2EEModuleUtilEx.checkJ2EEModulesAcyclic(models);
                    final ModifiableRootModel[] rootModels = models.toArray(new ModifiableRootModel[models.size()]);
                    projectRootManager.multiCommit(myModuleModel, rootModels);
                    myFacetsConfigurator.commitFacets();
                }
                catch (ConfigurationException e) {
                    // Remember the failure; it is rethrown outside the write action.
                    ex[0] = e;
                }
                finally {
                    myFacetsConfigurator = new ProjectFacetsConfigurator();
                    myModuleModel = ModuleManager.getInstance(myProject).getModifiableModel();
                    for (Module module : myModuleModel.getModules()) {
                        if (!module.isDisposed()) {
                            final ModuleEditor moduleEditor = getModuleEditor(module);
                            if (moduleEditor != null) {
                                final ModuleBuilder builder = moduleEditor.getModuleBuilder();
                                if (builder != null) {
                                    builder.addSupport(module);
                                }
                            }
                        }
                    }
                }
            }
        });
        if (ex[0] != null) {
            throw ex[0];
        }
        if (!J2EEModuleUtilEx.checkDependentModulesOutputPathConsistency(myProject, J2EEModuleUtil.getAllJ2EEModules(myProject), true)) {
            throw new ConfigurationException(null);
        }
        ApplicationManager.getApplication().saveAll();
        myModified = false;
    }

    public void setModified(final boolean modified) {
        myModified = modified;
    }

    public ModifiableModuleModel getModuleModel() {
        return myModuleModel;
    }

    public boolean deleteModule(final Module module) {
        return doRemoveModule(getModuleEditor(module));
    }

    /**
     * Runs the new-module wizard and, when confirmed, creates the module and
     * its editor.
     *
     * @param parent the component to parent the wizard dialog to
     * @return the new module, or null when cancelled, creation failed, or the
     *         project is the default project
     */
    @Nullable
    public Module addModule(Component parent) {
        if (myProject.isDefault()) return null;
        final Pair<ModuleBuilder, ConfigureFacetsStep> pair = runModuleWizard(parent);
        final ModuleBuilder builder = pair.getFirst();
        if (builder != null) {
            final Module module = createModule(builder);
            if (module != null) {
                ApplicationManager.getApplication().runWriteAction(new Runnable() {
                    public void run() {
                        createModuleEditor(module, builder, pair.getSecond());
                    }
                });
            }
            return module;
        }
        return null;
    }

    /** Creates the module inside a write action; shows an error dialog and returns null on failure. */
    private Module createModule(final ModuleBuilder builder) {
        final Exception[] ex = new Exception[]{null};
        final Module module = ApplicationManager.getApplication().runWriteAction(new Computable<Module>() {
            @SuppressWarnings({"ConstantConditions"})
            public Module compute() {
                try {
                    return builder.createModule(myModuleModel);
                }
                catch (Exception e) {
                    ex[0] = e;
                    return null;
                }
            }
        });
        if (ex[0] != null) {
            Messages.showErrorDialog(ProjectBundle.message("module.add.error.message", ex[0].getMessage()),
                ProjectBundle.message("module.add.error.title"));
        }
        return module;
    }

    /** Non-wizard entry point: creates a module from the builder, registers its editor and notifies the others. */
    void addModule(final ModuleBuilder moduleBuilder, final ConfigureFacetsStep facetsStep) {
        final Module module = createModule(moduleBuilder);
        if (module != null) {
            ApplicationManager.getApplication().runWriteAction(new Runnable() {
                public void run() {
                    createModuleEditor(module, moduleBuilder, facetsStep);
                    Collections.sort(myModuleEditors, myModuleEditorComparator);
                }
            });
            processModuleCountChanged(myModuleEditors.size() - 1, myModuleEditors.size());
        }
    }

    // Shows the add-module wizard; returns (null, null) when it was cancelled.
    Pair<ModuleBuilder, ConfigureFacetsStep> runModuleWizard(Component dialogParent) {
        AddModuleWizard wizard = new AddModuleWizard(dialogParent, myProject, this);
        wizard.show();
        final ConfigureFacetsStep facetEditorsStep = wizard.getFacetEditorsStep();
        if (wizard.isOK()) {
            return Pair.create(wizard.getModuleBuilder(), facetEditorsStep);
        }
        return Pair.create(null, null);
    }

    /**
     * Asks for confirmation, then removes the module together with all
     * dependencies other modules have on it.
     *
     * @return true when the module was removed, false when the user declined
     */
    private boolean doRemoveModule(ModuleEditor selectedEditor) {
        String question;
        if (myModuleEditors.size() == 1) {
            question = ProjectBundle.message("module.remove.last.confirmation");
        }
        else {
            question = ProjectBundle.message("module.remove.confirmation", selectedEditor.getModule().getName());
        }
        int result =
            Messages.showYesNoDialog(myProject, question, ProjectBundle.message("module.remove.confirmation.title"), Messages.getQuestionIcon());
        if (result != 0) {
            return false;
        }
        // do remove
        myModuleEditors.remove(selectedEditor);
        // destroyProcess removed module
        final Module moduleToRemove = selectedEditor.getModule();
        // remove all dependencies on the module that is about to be removed
        List<ModifiableRootModel> modifiableRootModels = new ArrayList<ModifiableRootModel>();
        for (final ModuleEditor moduleEditor : myModuleEditors) {
            final ModifiableRootModel modifiableRootModel = moduleEditor.getModifiableRootModelProxy();
            modifiableRootModels.add(modifiableRootModel);
        }
        // destroyProcess editor
        final ModifiableRootModel model = selectedEditor.dispose();
        ModuleDeleteProvider.removeModule(moduleToRemove, model, modifiableRootModels, myModuleModel);
        // old count is size + 1 because the editor was already removed above
        processModuleCountChanged(myModuleEditors.size() + 1, myModuleEditors.size());
        return true;
    }

    private void processModuleCountChanged(int oldCount, int newCount) {
        for (ModuleEditor moduleEditor : myModuleEditors) {
            moduleEditor.moduleCountChanged(oldCount, newCount);
        }
    }

    public void processModuleCompilerOutputChanged(String baseUrl) {
        for (ModuleEditor moduleEditor : myModuleEditors) {
            moduleEditor.updateCompilerOutputPathChanged(baseUrl, moduleEditor.getName());
        }
    }

    /** @return true when the module model, any editor, the facets, or the J2EE output paths have pending changes. */
    public boolean isModified() {
        if (myModuleModel.isChanged()) {
            return true;
        }
        for (ModuleEditor moduleEditor : myModuleEditors) {
            if (moduleEditor.isModified()) {
                return true;
            }
        }
        return myModified || myFacetsConfigurator.isModified() ||
            !J2EEModuleUtilEx.checkDependentModulesOutputPathConsistency(myProject, J2EEModuleUtil.getAllJ2EEModules(myProject), false);
    }

    /**
     * Opens the project-structure dialog, optionally pre-selecting a module
     * tab and starting the new-module wizard.
     */
    public static boolean showDialog(Project project,
                                     @Nullable final String moduleToSelect,
                                     final String tabNameToSelect,
                                     final boolean showModuleWizard) {
        final ProjectRootConfigurable projectRootConfigurable = ProjectRootConfigurable.getInstance(project);
        return ShowSettingsUtil.getInstance().editConfigurable(project, projectRootConfigurable, new Runnable() {
            public void run() {
                if (moduleToSelect != null) {
                    projectRootConfigurable.selectModuleTab(moduleToSelect, tabNameToSelect);
                }
                projectRootConfigurable.setStartModuleWizard(showModuleWizard);
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        projectRootConfigurable.setStartModuleWizard(false);
                    }
                });
            }
        });
    }

    /** Propagates a module rename to the matching editor. */
    public void moduleRenamed(final String oldName, final String name) {
        for (ModuleEditor moduleEditor : myModuleEditors) {
            if (Comparing.strEqual(moduleEditor.getName(), oldName)) {
                moduleEditor.setModuleName(name);
                moduleEditor.updateCompilerOutputPathChanged(ProjectRootConfigurable.getInstance(myProject).getCompilerOutputUrl(), name);
                return;
            }
        }
    }

    /**
     * @return pair of modules which become circular after adding dependency, or null if all remains OK
     */
    @Nullable
    public static Pair<Module, Module> addingDependencyFormsCircularity(final Module currentModule, Module toDependOn) {
        assert currentModule != toDependOn;
        // Builds throwaway modifiable models, tentatively adds the dependency
        // and compares the strongly-connected components before and after.
        final Map<Module, ModifiableRootModel> models = new LinkedHashMap<Module, ModifiableRootModel>();
        Project project = currentModule.getProject();
        for (Module module : ModuleManager.getInstance(project).getModules()) {
            ModifiableRootModel model = ModuleRootManager.getInstance(module).getModifiableModel();
            models.put(module, model);
        }
        ModifiableRootModel currentModel = models.get(currentModule);
        ModifiableRootModel toDependOnModel = models.get(toDependOn);
        Collection<Chunk<ModifiableRootModel>> nodesBefore = buildChunks(models);
        for (Chunk<ModifiableRootModel> chunk : nodesBefore) {
            if (chunk.containsNode(toDependOnModel) && chunk.containsNode(currentModel)) return null; // they circular already
        }
        try {
            currentModel.addModuleOrderEntry(toDependOn);
            Collection<Chunk<ModifiableRootModel>> nodesAfter = buildChunks(models);
            for (Chunk<ModifiableRootModel> chunk : nodesAfter) {
                if (chunk.containsNode(toDependOnModel) && chunk.containsNode(currentModel)) {
                    Iterator<ModifiableRootModel> nodes = chunk.getNodes().iterator();
                    return Pair.create(nodes.next().getModule(), nodes.next().getModule());
                }
            }
        }
        finally {
            // Never commit the tentative dependency; dispose every throwaway model.
            for (ModifiableRootModel model : models.values()) {
                model.dispose();
            }
        }
        return null;
    }

    private static Collection<Chunk<ModifiableRootModel>> buildChunks(final Map<Module, ModifiableRootModel> models) {
        return ModuleCompilerUtil.toChunkGraph(createGraphGenerator(models)).getNodes();
    }
}
|
package org.cbsa.api.model;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
/***
 * Stores file metadata (general info plus extracted keywords) into HBase.
 */
@SuppressWarnings("deprecation")
public class MetadataManager {
    private Configuration config;
    private HBaseAdmin admin;
    private HTable fileInfoTable;
    private HTable fileKeywordsTable;

    /***
     * Use this to create meta data tables and start connection to database to
     * store meta data of files. This constructor will create meta data tables
     * to HBase database if not exists and print error if fails to create
     * database tables.
     */
    public MetadataManager() {
        if (!MetadataTables.createTables()) {
            System.err.println("Metadata Table Creation Failed");
            return;
        }
        config = HBaseConfiguration.create();
        try {
            fileInfoTable = new HTable(config, MetaSchama.TB_FILE_INFO);
            fileKeywordsTable = new HTable(config, MetaSchama.TB_FILE_KEYWORDS);
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            admin = new HBaseAdmin(config);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /***
     * Use this method to add new meta data of file to database.
     *
     * @param fileMetadata the metadata (id, name, path, size, pages, domains,
     *            keywords) to persist
     */
    public void addNewFileMetadata(FileMetadata fileMetadata) {
        // Row for the file-info table, keyed by the file id.
        Put putFileInfo = new Put(Bytes.toBytes(fileMetadata.getFileID()));
        putFileInfo.add(Bytes.toBytes(MetaSchama.CF_GENERAL),
                Bytes.toBytes(MetaSchama.CO_FILE_NAME),
                Bytes.toBytes(fileMetadata.getFileName()));
        putFileInfo.add(Bytes.toBytes(MetaSchama.CF_GENERAL),
                Bytes.toBytes(MetaSchama.CO_FILE_PATH),
                Bytes.toBytes(fileMetadata.getFilePath()));
        putFileInfo.add(Bytes.toBytes(MetaSchama.CF_GENERAL),
                Bytes.toBytes(MetaSchama.CO_FILE_SIZE),
                Bytes.toBytes(fileMetadata.getFileSize()));
        putFileInfo.add(Bytes.toBytes(MetaSchama.CF_GENERAL),
                Bytes.toBytes(MetaSchama.CO_TOTAL_PAGES),
                Bytes.toBytes(fileMetadata.getTotalPages()));
        List<String> fileDomains = fileMetadata.getFileDomains();
        for (int i = 0; i < fileDomains.size(); i++) {
            putFileInfo.add(Bytes.toBytes(MetaSchama.CF_DOMAIN),
                    Bytes.toBytes(MetaSchama.CO_SUB_DOMAIN + i),
                    Bytes.toBytes(fileDomains.get(i)));
        }
        putFileInfo.add(Bytes.toBytes(MetaSchama.CF_FILE_ID),
                Bytes.toBytes(MetaSchama.CO_ID),
                Bytes.toBytes(fileMetadata.getFileID()));
        // One row per keyword, keyed "<fileId>_<index>".
        List<Put> putKeywordsList = new ArrayList<Put>();
        List<Keyword> fileKeywords = fileMetadata.getKeywords();
        for (int i = 0; i < fileKeywords.size(); i++) {
            Put putKeywords = new Put(Bytes.toBytes(fileMetadata.getFileID()
                    + "_" + String.valueOf(i)));
            putKeywords.add(Bytes.toBytes(MetaSchama.CF_FILE_ID),
                    Bytes.toBytes(MetaSchama.CO_ID),
                    Bytes.toBytes(fileMetadata.getFileID()));
            putKeywords.add(Bytes.toBytes(MetaSchama.CF_KEYWORDS),
                    Bytes.toBytes(MetaSchama.CO_KEYWORD),
                    Bytes.toBytes(fileKeywords.get(i).getKeyword()));
            putKeywords.add(Bytes.toBytes(MetaSchama.CF_KEYWORDS),
                    Bytes.toBytes(MetaSchama.CO_FREQUENCY),
                    Bytes.toBytes(fileKeywords.get(i).getFrequency()));
            putKeywordsList.add(putKeywords);
        }
        try {
            fileInfoTable.put(putFileInfo);
            for (Put putKeywords : putKeywordsList) {
                fileKeywordsTable.put(putKeywords);
            }
            // NOTE(review): closing the tables here makes this manager
            // single-use — a second addNewFileMetadata() call will fail.
            // Kept for behavior compatibility; consider a separate close().
            fileInfoTable.close();
            fileKeywordsTable.close();
            // FIX: success is only reported when the puts actually succeeded;
            // previously this line ran even after the catch block.
            System.out.println("New File Metadata Added");
        } catch (IOException e) {
            e.printStackTrace();
            System.err.println("File Metadata Insertion Failed");
        }
    }
}
|
package org.objectweb.proactive.p2p.v2.service;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Random;
import java.util.Set;
import java.util.Vector;
import org.apache.log4j.Logger;
import org.objectweb.proactive.Body;
import org.objectweb.proactive.InitActive;
import org.objectweb.proactive.ProActive;
import org.objectweb.proactive.ProActiveInternalObject;
import org.objectweb.proactive.RunActive;
import org.objectweb.proactive.Service;
import org.objectweb.proactive.core.ProActiveRuntimeException;
import org.objectweb.proactive.core.config.PAProperties;
import org.objectweb.proactive.core.node.Node;
import org.objectweb.proactive.core.node.NodeFactory;
import org.objectweb.proactive.core.util.log.Loggers;
import org.objectweb.proactive.core.util.log.ProActiveLogger;
import org.objectweb.proactive.core.util.wrapper.BooleanMutableWrapper;
import org.objectweb.proactive.core.util.wrapper.IntMutableWrapper;
import org.objectweb.proactive.p2p.v2.service.messages.AcquaintanceRequest;
import org.objectweb.proactive.p2p.v2.service.util.P2PConstants;
/**
* Updating the group of exportAcquaintances of the P2P service.
*
* @author Alexandre di Costanzo
*
*/
public class P2PAcquaintanceManager implements InitActive, RunActive,
Serializable, P2PConstants, ProActiveInternalObject {
    /**
     * The maximum waiting time before considering an ACQ request
     * is lost and should be resent.
     */
    private static long MAX_WAIT_TIME = 10000;
    private final static Logger logger = ProActiveLogger.getLogger(Loggers.P2P_ACQUAINTANCES);
    // Target number of acquaintances (NOA), read once from the ProActive configuration.
    static public int NOA = Integer.parseInt(PAProperties.PA_P2P_NOA.getValue());
    // static public int NOA = new NOAPowerLawGenerator(1, 9, -3).nextInt();
    // protected Random rand = new Random();
    // Used when a random acquaintance must be dropped (over the NOA limit).
    private Random randomizer = new Random();
    /**
     * A ProActive Stub on the local P2PService
     */
    private P2PService localService = null;
    // NOTE(review): never assigned in the code visible here, yet returned by
    // getActiveGroup() — confirm it is initialized elsewhere.
    private P2PService acquaintancesActived = null;
    protected AcquaintancesWrapper acquaintances;
    // store the name of awaited replies for setting acquaintances
    protected HashMap<String, DatedRequest> awaitedReplies = new HashMap<String, DatedRequest>();
    // list of prefered acquaintances (peer URLs to try first)
    private HashSet<String> preferedAcquaintancesURLs = new HashSet<String>();
    /**
     * The empty constructor for activating.
     * Required by ProActive to turn this object into an active object.
     */
    public P2PAcquaintanceManager() {
        // empty constructor
    }
    /**
     * Construct a new <code>P2PAcquaintanceManager</code>.
     *
     * @param localService a reference to the local P2P service.
     */
    public P2PAcquaintanceManager(P2PService localService) {
        this.localService = localService;
    }
    /**
     * Creates the (initially empty) acquaintances wrapper.
     *
     * @see org.objectweb.proactive.InitActive#initActivity(org.objectweb.proactive.Body)
     */
    public void initActivity(Body body) {
        // String nodeUrl = body.getNodeURL();
        this.acquaintances = new AcquaintancesWrapper();
        logger.debug("Group of exportAcquaintances successfuly created");
    }
    /**
     * Main activity loop: while the body is active, sends a heart-beat to the
     * acquaintance group, then works toward the NOA target (looks for new
     * peers when under it, drops a random one when over it), serves requests
     * for one TTU period and finally expires timed-out handshakes.
     *
     * @see org.objectweb.proactive.RunActive#runActivity(org.objectweb.proactive.Body)
     */
    public void runActivity(Body body) {
        Service service = new Service(body);
        body.getRequestQueue();
        while (body.isActive()) {
            if (this.acquaintances.size() > 0) {
                // Register the local P2P service in all exportAcquaintances
                logger.debug("Sending heart-beat");
                this.acquaintances.getAcquaintances().heartBeat();
                logger.debug("Heart-beat sent");
            }
            if (this.getEstimatedNumberOfAcquaintances() < NOA) {
                // this.dumpTables();
                lookForNewPeers();
            } else if (this.acquaintances.size() > NOA) {
                //we should drop some here
                //do we go for all at once or just one at a time?
                logger.info("I have too many neighbors!");
                this.dropRandomPeer();
            } else {
                logger.info("I have reached the maximum of acquaintance ");
            }
            waitTTU(service);
            // this.dumpTables();
            this.cleanAwaitedReplies();
        }
    }
    /**
     * Tries to gain acquaintances while under the NOA target: first through
     * the preferred-acquaintances URLs, then (when still short) by asking the
     * local service to send an exploration message.
     */
    protected void lookForNewPeers() {
        // How many peers ?
        if (this.getEstimatedNumberOfAcquaintances() < NOA) {
            // Looking for new peers
            logger.debug("NOA is " + NOA +
                " - Size of P2PAcquaintanceManager is " +
                this.getEstimatedNumberOfAcquaintances() +
                " looking for new acquaintances through prefered ones");
            this.connectToPreferedAcquaintances();
        }
        // How many peers ? (re-checked: the call above may have started handshakes)
        if (this.getEstimatedNumberOfAcquaintances() < NOA) {
            // Looking for new peers
            logger.debug("NOA is " + NOA +
                " - Size of P2PAcquaintanceManager is " +
                this.getEstimatedNumberOfAcquaintances() +
                " looking for new acquaintances through exploration");
            // Sending exploring message
            // System.out.println(">>>>>>>>>>>>>>>>> P2PAcquaintanceManager.runActivity()");
            this.localService.explore();
            logger.debug("Explorating message sent");
        }
    }
    /**
     * Serves pending requests until one full TTU period has elapsed. The
     * first blocking call may return early (after serving one request), so a
     * loop keeps serving for the remainder of the period.
     *
     * @param service the request service bound to this active object's body
     */
    protected void waitTTU(Service service) {
        // Waiting TTU & serving requests
        logger.debug("Waiting for " + P2PService.TTU + "ms");
        long endTime = System.currentTimeMillis() + P2PService.TTU;
        service.blockingServeOldest(P2PService.TTU);
        while (System.currentTimeMillis() < endTime) {
            try {
                service.blockingServeOldest(endTime -
                    System.currentTimeMillis());
            } catch (ProActiveRuntimeException e) {
                e.printStackTrace();
                logger.debug("Certainly because the body is not active", e);
            }
        }
        logger.debug("End waiting");
    }
    /**
     * Walks the preferred-acquaintances URLs and, while under the NOA target,
     * tries to contact each peer and start an acquaintance handshake with it.
     * Peers that are unreachable or already acquainted are kept for a later
     * retry; the preferred set is rebuilt at the end from the leftovers.
     */
    public void connectToPreferedAcquaintances() {
        int size = this.preferedAcquaintancesURLs.size();
        // int index = 0;
        // System.out.println(
        // "P2PAcquaintanceManager.connectToPreferedAcquaintances() number of URLs " +
        // preferedAcquaintancesURLs.size());
        //while(!this.peers.isEmpty()) {
        // for (int i = 0; i < size; i++) {
        HashSet<String> newSet = new HashSet<String>();
        String tmp = null;
        Iterator it = this.preferedAcquaintancesURLs.iterator();
        while (it.hasNext() &&
                (this.getEstimatedNumberOfAcquaintances() < NOA)) {
            //remove it from the current HashSet
            //and add it in the temporary one
            tmp = (String) it.next();
            // newSet.add(tmp);
            it.remove();
            String peerUrl = buildCorrectUrl(urlAdderP2PNodeName(tmp));
            // System.out.println(
            // "P2PAcquaintanceManager.connectToPreferedAcquaintances() " +
            // peerUrl);
            try {
                Node distNode = NodeFactory.getNode(peerUrl);
                P2PService peer = (P2PService) distNode.getActiveObjects(P2PService.class.getName())[0];
                if ( //!peer.equals(this.localService) &&
                    !this.contains(peer).booleanValue()) {
                    // Send a message to the remote peer to register myself
                    System.out.println(
                        "P2PAcquaintanceManager requesting peer " + peerUrl);
                    //peer.registerRequest(this.localService);
                    startAcquaintanceHandShake(peerUrl, peer);
                } else {
                    // Already an acquaintance: keep the URL for later.
                    newSet.add(peerUrl);
                }
            } catch (Exception e) {
                System.out.println("The peer at " + peerUrl +
                    " couldn't be contacted");
                e.printStackTrace();
                //put it back for later use
                newSet.add(peerUrl);
                //remove it from the awaited
                //awaitedReplies.remove(peerUrl));
            }
        }
        if (this.size().intValue() == 0) {
            logger.info("No peer could be found to join the network");
            // System.out
            // .println("P2PAcquaintanceManager.connectToPreferedAcquaintances() urls available " + this.preferedAcquaintancesURLs.size());
        } else {
            //add all the remaining urls
            // System.out
            // .println("P2PAcquaintanceManager.connectToPreferedAcquaintances() adding the remaining urls " + this.preferedAcquaintancesURLs.size());
            newSet.addAll(this.preferedAcquaintancesURLs);
        }
        this.preferedAcquaintancesURLs = newSet;
        // System.out
        // .println("P2PAcquaintanceManager.connectToPreferedAcquaintances() at the end " + this.preferedAcquaintancesURLs.size());
    }
/**
* Remove awaited requests which have timeouted
*/
@SuppressWarnings("unchecked")
public void cleanAwaitedReplies() {
// System.out.println("P2PAcquaintanceManager.cleanAwaitedReplies() still " + awaitedReplies.size() );
ArrayList<String> urls = new ArrayList<String>();
Set<Map.Entry<String, DatedRequest>> map = (Set<Map.Entry<String, DatedRequest>>) awaitedReplies.entrySet();
Iterator it = map.iterator();
while (it.hasNext()) {
Map.Entry<String, DatedRequest> entry = (Map.Entry<String, DatedRequest>) it.next();
// System.out.println("P2PAcquaintanceManager.cleanAwaitedReplies() request sent at " + ((DatedRequest) entry.getValue()).getTime() );
// System.out.println("P2PAcquaintanceManager.cleanAwaitedReplies() now " + System.currentTimeMillis());
if ((System.currentTimeMillis() -
((DatedRequest) entry.getValue()).getTime()) > MAX_WAIT_TIME) {
System.out.println("xxxxx Peer " + entry.getKey() +
" did not reply to our request");
//this guy did not reply so we should put it back in the preferedACQList
urls.add(entry.getKey());
it.remove();
}
}
it = urls.iterator();
while (it.hasNext()) {
this.preferedAcquaintancesURLs.add((String) it.next());
}
}
    /**
     * Starts an acquaintance handshake: sends an AcquaintanceRequest to the
     * peer and records the request (with its send timestamp) in the
     * awaited-replies table so it can be expired by cleanAwaitedReplies().
     *
     * @param peerUrl the URL of the remote peer
     * @param peer the remote peer's service stub
     */
    public void startAcquaintanceHandShake(String peerUrl, P2PService peer) {
        this.localService.transmit(new AcquaintanceRequest(1), peer);
        // peer.message(new AcquaintanceRequest(1,
        // this.localService.generateUuid(), this.localService));
        System.out.println("XXXXXX putting in awaited List " + peerUrl);
        awaitedReplies.put(buildCorrectUrl(peerUrl),
            new DatedRequest(peer, System.currentTimeMillis()));
    }
    /**
     * @return An active object to make group method call.
     */
    // NOTE(review): acquaintancesActived is never assigned in the code
    // visible here — confirm it is initialized elsewhere before relying on
    // this method returning a usable group stub.
    public P2PService getActiveGroup() {
        return this.acquaintancesActived;
    }
    /**
     * Add a peer in the group of acquaintances.
     * Add only if not already present and still some space left (NOA).
     *
     * @param peer the peer to add.
     * @return null when the peer was accepted, otherwise this manager's
     *         acquaintance URLs (see {@link #add(String, P2PService)})
     */
    public Vector<String> add(P2PService peer) {
        return this.add(buildCorrectUrl(ProActive.getActiveObjectNodeUrl(peer)),
            peer);
    }
    /**
     * Add a peer in the group of acquaintances.
     * Add only if not already present and still some space left (NOA).
     *
     * @param peerUrl the url of the peer
     * @param peer the peer to add
     * @return null when the peer was accepted (or skipped because its URL
     *         marks an unreachable body); otherwise — on rejection or
     *         failure — the URLs of our current acquaintances
     */
    public Vector<String> add(String peerUrl, P2PService peer) {
        boolean result = false;
        try {
            if (this.shouldBeAcquaintance(peer)) {
                // Guard against stub URLs of bodies that cannot be contacted.
                if (!peerUrl.matches(".*cannot contact the body.*")) {
                    result = this.acquaintances.add(peer, peerUrl);
                    logger.info("Acquaintance " + peerUrl + " " + result +
                        " added");
                }
                return null;
            }
        } catch (Exception e) {
            // Undo any partial registration when the remote call failed.
            this.acquaintances.remove(peer, peerUrl);
            logger.debug("Problem when adding peer", e);
        }
        return this.getAcquaintancesURLs();
    }
public void acqAccepted(String url, P2PService peer) {
logger.info("P2PAcquaintanceManager.acqAccepted() got a reply from " +
url);
System.out.println("URL:" + url + " PEER:" + peer);
//we remove it from the awaited answers
//if we don't do so, it might be refused because of the NOA limit
this.removeFromAwaited(url);
this.add(url, peer);
System.out.println("P2PAcquaintanceManager.acqAccepted() adding " +
"--" + url + "--");
this.preferedAcquaintancesURLs.add(url);
Iterator it = this.preferedAcquaintancesURLs.iterator();
while (it.hasNext()) {
System.out.println(" " + it.next());
}
}
    /**
     * Handles a negative answer to an acquaintance handshake: forgets the
     * pending request, merges the peer's suggested acquaintance URLs into the
     * preferred set and keeps the rejecting peer's URL for a later retry.
     *
     * @param url the URL of the rejecting peer
     * @param s acquaintance URLs suggested by the rejecting peer
     */
    public void acqRejected(String url, Vector<String> s) {
        logger.info("P2PAcquaintanceManager.acqRejected() " + url);
        //this.removeFromReply(url,s);
        this.removeFromAwaited(url);
        this.addToPreferedAcq(s);
        //we add it back
        this.preferedAcquaintancesURLs.add(url);
    }
public void removeFromAwaited(String url) {
String[] tmp = this.getAwaitedRepliesUrls();
for (int i = 0; i < tmp.length; i++) {
System.out.println("--" + tmp[i] + "--");
}
logger.info("Removing --" + url + "-- from awaited peers " +
awaitedReplies.remove(url));
}
/**
* Remove the peer from the current acquaintaces
* Add the acquaintancesURLs to the prefered acquaintances
* @param peer
* @param acquaintancesURLs
*/
public void remove(P2PService peer, Vector<String> acquaintancesURLs) {
boolean result = this.acquaintances.remove(peer,
buildCorrectUrl(ProActive.getActiveObjectNodeUrl(peer)));
if (acquaintancesURLs != null) {
this.addToPreferedAcq(acquaintancesURLs);
}
//boolean result = this.acquaintances.remove(peer);
if (result) {
logger.info("Peer successfully removed");
} else {
logger.debug("Peer not removed");
}
}
    /**
     * Drops one randomly chosen acquaintance: removes it locally and asks the
     * remote peer to drop us symmetrically, handing it our acquaintance URLs
     * as alternative candidates.
     */
    protected void dropRandomPeer() {
        //pick up a random peer in the list
        P2PService p = randomPeer();
        // local removal, without inheriting any URL list from the peer
        this.remove(p, null);
        // symmetric removal on the remote side, offering our URLs instead
        p.remove(this.localService, this.getAcquaintancesURLs());
    }
// public void dumpAcquaintances() {
// acquaintances.dumpAcquaintances();
public Vector<String> getAcquaintancesURLs() {
return new Vector<String>(Arrays.asList(
this.acquaintances.getAcquaintancesURLs()));
}
/**
* Returns the number of elements in this group.
*
* @return the number of elements in this group.
*/
public IntMutableWrapper size() {
return new IntMutableWrapper(this.acquaintances.size());
}
public int getEstimatedNumberOfAcquaintances() {
return this.acquaintances.size() + awaitedReplies.size();
}
/**
* Returns <tt>true</tt> if this collection contains the specified
* element. More formally, returns <tt>true</tt> if and only if this
* collection contains at least one element <tt>e</tt> such that
* <tt>(o==null ? e==null : o.equals(e))</tt>.
*
* @param service element whose presence in this collection is to be tested.
* @return <tt>true</tt> if this collection contains the specified
* element.
*/
public BooleanMutableWrapper contains(P2PService service) {
return new BooleanMutableWrapper(this.acquaintances.contains(service));
}
    /**
     * @return a random acquaintance reference.
     */
    public P2PService randomPeer() {
        // NOTE(review): Random.nextInt(0) throws IllegalArgumentException when
        // the acquaintance list is empty — callers must ensure it is non-empty.
        int random = this.randomizer.nextInt(this.acquaintances.size());
        return (P2PService) this.acquaintances.get(random);
    }
    /**
     * @return the list of current acquaintances.
     */
    public Vector getAcquaintanceList() {
        return new Vector(this.acquaintances.getAcquaintancesAsGroup());
    }
    /**
     * @return the acquaintances as a group-typed {@code P2PService} stub.
     */
    public P2PService getAcquaintances() {
        return this.acquaintances.getAcquaintances();
    }
// /**
// * Calls the transmit() method of the message m
// * @param m
// */
// public void transmit(Message m) {
//// m.transmit(this.acquaintances.getAcquaintances());
// m.transmit(localService);
public int getMaxNOA() {
return NOA;
}
public boolean shouldBeAcquaintance(P2PService remoteService) {
if (this.contains(remoteService).booleanValue()) {
logger.debug("The remote peer is already known");
return false;
}
return acceptAnotherAcquaintance();
}
/**
* Indicates wether or not a new acquaintance should be accepted
* This is defined using a probability
* Always accept if 0 <= getEstimatedNumberOfAcquaintances() < NOA
* Accept with probability P if NOA <= getEstimatedNumberOfAcquaintances() < 2*NOA
* Reject otherwise
* The probability is linear
* y = -1/NOA*estimatedNumberOfAcquaintances + 2
* @return a boolean
*/
protected boolean acceptAnotherAcquaintance() {
if (this.getEstimatedNumberOfAcquaintances() < NOA) {
logger.debug("NOA not reached: I should be an acquaintance");
return true;
}
if (this.getEstimatedNumberOfAcquaintances() > (2 * NOA)) {
logger.debug("2*NOA reached, I refuse the acquaintance");
return false;
}
//we are in the grey area, only accept with some probability
//first compute the probability according to the max number
logger.info("estimatedNOA " + this.getEstimatedNumberOfAcquaintances());
double prob = (-(1.0 / this.getMaxNOA()) * this.getEstimatedNumberOfAcquaintances()) +
2;
logger.info("Probability to accept set to " + prob);
return (randomizer.nextDouble() <= prob);
// logger.debug("Accepted after probability check");
// return true;
// logger.debug("Random said: I should not be an acquaintance");
// return false;
}
public void setPreferedAcq(Vector<String> v) {
this.preferedAcquaintancesURLs = new HashSet<String>();
Iterator it = v.iterator();
while (it.hasNext()) {
String p = buildCorrectUrl((String) it.next());
System.out.println(p);
this.preferedAcquaintancesURLs.add(p);
}
}
    /**
     * Add the given peer urls to the current
     * prefered acquaintances list.
     * Note: unlike {@link #setPreferedAcq(Vector)}, the URLs are added as-is,
     * without being passed through buildCorrectUrl().
     *
     * @param v the list of acquaintances
     */
    public void addToPreferedAcq(Vector<String> v) {
        this.preferedAcquaintancesURLs.addAll(v);
    }
public String[] getAwaitedRepliesUrls() {
return this.awaitedReplies.keySet().toArray(new String[] { });
}
    /**
     * Add the default name of the P2P Node to a specified <code>url</code>.
     *
     * @param url the url.
     * @return the <code>url</code> with the name of the P2P Node.
     */
    private static String urlAdderP2PNodeName(String url) {
        // NOTE(review): the check uses <= 0, so a URL whose node name sits at
        // index 0 would still get the suffix appended — confirm intended.
        if (url.indexOf(P2P_NODE_NAME) <= 0) {
            url += ("/" + P2P_NODE_NAME);
        }
        return url;
    }
/**
* Add rmi:// in front of all URLS
* @param s
* @return
*/
private String buildCorrectUrl(String s) {
if (s.indexOf("
s = "
}
if (s.indexOf("rmi:") < 0) {
s = "rmi:" + s;
}
if (s.indexOf(P2PConstants.P2P_NODE_NAME) < 0) {
s = s + "/" + P2PConstants.P2P_NODE_NAME;
}
return s;
}
    /**
     * Changes the maximum number of acquaintances (NOA).
     * NOTE(review): writes the static NOA field from an instance method, so
     * the new value affects every P2PAcquaintanceManager in this JVM —
     * confirm this is intended.
     *
     * @param noa the new NOA value.
     */
    public void setMaxNOA(int noa) {
        logger.info("P2PAcquaintanceManager.setNOA() changing noa from " + NOA +
            " to " + noa);
        P2PAcquaintanceManager.NOA = noa;
    }
// public void setMaxNOA(int noa) {
// logger.info("P2PAcquaintanceManager.setMaxNOA() changing noa from " + NOA + " to " + noa);
// P2PAcquaintanceManager.MaxNOA = noa;
public void dumpTables() {
System.out.println("
Iterator it = preferedAcquaintancesURLs.iterator();
while (it.hasNext()) {
System.out.println(it.next());
}
System.out.println("
System.out.println("
this.awaitedReplies.size());
Set<Map.Entry<String, DatedRequest>> map = (Set<Map.Entry<String, DatedRequest>>) awaitedReplies.entrySet();
Iterator it2 = map.iterator();
while (it2.hasNext()) {
Map.Entry<String, DatedRequest> entry = (Map.Entry<String, DatedRequest>) it2.next();
System.out.println(entry.getKey() + " requested at " +
((DatedRequest) entry.getValue()).getTime());
}
System.out.println("
}
/**
* A class to remember when an ACQ request has
* been issued
* @author fhuet
*/
private class DatedRequest {
protected P2PService service;
protected long time;
DatedRequest(P2PService s, long t) {
this.service = service;
this.time = t;
}
public long getTime() {
return this.time;
}
public P2PService getP2PService() {
return service;
}
}
}
|
package de.longri.cachebox3.gui.views;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.InputMultiplexer;
import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.g3d.Model;
import com.badlogic.gdx.input.GestureDetector;
import com.badlogic.gdx.scenes.scene2d.Touchable;
import de.longri.cachebox3.CB;
import de.longri.cachebox3.CacheboxMain;
import de.longri.cachebox3.gui.CacheboxMapAdapter;
import de.longri.cachebox3.gui.map.MapState;
import de.longri.cachebox3.gui.map.MapViewPositionChangedHandler;
import de.longri.cachebox3.gui.map.layer.Compass;
import de.longri.cachebox3.gui.map.layer.LocationOverlay;
import de.longri.cachebox3.gui.map.layer.MyLocationModel;
import de.longri.cachebox3.gui.stages.StageManager;
import de.longri.cachebox3.gui.widgets.MapCompass;
import de.longri.cachebox3.gui.widgets.MapStateButton;
import de.longri.cachebox3.gui.widgets.ZoomButton;
import de.longri.cachebox3.locator.Location;
import de.longri.cachebox3.locator.Locator;
import org.oscim.core.MapPosition;
import org.oscim.event.Event;
import org.oscim.gdx.LayerHandler;
import org.oscim.gdx.MotionHandler;
import org.oscim.layers.TileGridLayer;
import org.oscim.layers.tile.buildings.BuildingLayer;
import org.oscim.layers.tile.vector.VectorTileLayer;
import org.oscim.layers.tile.vector.labeling.LabelLayer;
import org.oscim.map.Layers;
import org.oscim.map.Map;
import org.oscim.map.Viewport;
import org.oscim.renderer.BitmapRenderer;
import org.oscim.renderer.GLViewport;
import org.oscim.renderer.MapRenderer;
import org.oscim.renderer.bucket.TextItem;
import org.oscim.renderer.bucket.TextureBucket;
import org.oscim.renderer.bucket.TextureItem;
import org.oscim.scalebar.*;
import org.oscim.theme.VtmThemes;
import org.oscim.tiling.source.mapfile.MapFileTileSource;
import org.slf4j.LoggerFactory;
/**
 * Map view built on the VTM engine: creates and owns the
 * {@link CacheboxMapAdapter}, its layer stack (base tiles, buildings, labels,
 * scale bar, own-location overlay and model) and the on-map widgets
 * (map-state button, compass, zoom buttons), and manages the libGDX input
 * handlers that drive the map.
 */
public class MapView extends AbstractView {
    final static org.slf4j.Logger log = LoggerFactory.getLogger(MapView.class);
    // lazily created multiplexer combining motion, gesture and map input handlers
    private InputMultiplexer mapInputHandler;
    private CacheboxMapAdapter mMap;
    private final CacheboxMain main;
    private MapScaleBarLayer mapScaleBarLayer;
    // bearing reported to the location overlay's Compass callback
    private float myBearing;
    private final MapStateButton mapStateButton;
    private final MapCompass mapOrientationButton;
    private final ZoomButton zoomButton;
    private MapFileTileSource tileSource;
    LocationOverlay myLocationAccuracy;
    MyLocationModel myLocationModel;
    MapViewPositionChangedHandler positionChangedHandler;

    /**
     * Creates the map, the map-state button (with its state-change behavior),
     * the compass widget and the zoom buttons.
     *
     * @param main the application main, used for renderer/viewport wiring.
     */
    public MapView(CacheboxMain main) {
        super("MapView");
        this.setTouchable(Touchable.disabled);
        this.main = main;
        mMap = createMap();
        mapStateButton = new MapStateButton(new MapStateButton.StateChangedListener() {
            @Override
            public void stateChanged(MapState state) {
                positionChangedHandler.setMapState(state);
                checkInputListener();
                Location actLocation;
                double scale;
                switch (state) {
                    case FREE:
                        break;
                    case GPS:
                        // set to act position
                        actLocation = Locator.getLocation();
                        scale = mMap.getMapPosition().getScale();
                        mMap.setMapPosition(actLocation.latitude, actLocation.longitude, scale);
                        break;
                    case WP:
                        break;
                    case LOCK:
                        break;
                    case CAR:
                        // set to act position
                        actLocation = Locator.getLocation();
                        scale = mMap.getMapPosition().getScale();
                        mMap.setMapPosition(actLocation.latitude, actLocation.longitude, scale);
                        // set full tilt
                        MapPosition mapPosition = mMap.getMapPosition();
                        mapPosition.setTilt(Viewport.MAX_TILT);
                        mMap.setMapPosition(mapPosition);
                        // // set orientation by bearing
                        // mapOrientationButton.setChecked(true);
                        break;
                }
            }
        });
        this.mapOrientationButton = new MapCompass(mapStateButton.getWidth(), mapStateButton.getHeight());
        this.addActor(mapStateButton);
        this.addActor(mapOrientationButton);
        this.setTouchable(Touchable.enabled);
        this.zoomButton = new ZoomButton(new ZoomButton.ValueChangeListener() {
            @Override
            public void valueChanged(int changeValue) {
                // zoom in (factor 2) on positive values, out (factor 0.5) otherwise
                if (changeValue > 0)
                    MapView.this.mMap.animator().animateZoom(500, 2, 0, 0);
                else
                    MapView.this.mMap.animator().animateZoom(500, 0.5, 0, 0);
                MapView.this.mMap.updateMap(true);
            }
        });
        this.zoomButton.pack();
        this.addActor(zoomButton);
    }

    /**
     * Enables or disables map input according to the current map state:
     * CAR and LOCK suppress direct user interaction with the map.
     */
    private void checkInputListener() {
        MapState state = mapStateButton.getState();
        // remove input handler with map state Car and Lock
        if (state == MapState.CAR || state == MapState.LOCK) {
            removeInputListener();
        } else {
            addInputListener();
        }
    }

    /**
     * Creates a fresh {@link CacheboxMapAdapter}, registers a renderer on the
     * application main, sets an initial position and builds the layer stack.
     *
     * @return the newly created map adapter (also stored in {@code mMap}).
     */
    public CacheboxMapAdapter createMap() {
        main.drawMap = true;
        mMap = new CacheboxMapAdapter() {
            @Override
            public void tiltChanged(float newTilt) {
                if (positionChangedHandler != null) positionChangedHandler.tiltChangedFromMap(newTilt);
            }
            @Override
            public void onMapEvent(Event e, MapPosition mapPosition) {
                if (e == Map.MOVE_EVENT) {
                    // map is moved by user
                    mapStateButton.setState(MapState.FREE);
                }
            }
        };
        main.mMapRenderer = new MapRenderer(mMap);
        main.mMapRenderer.onSurfaceCreated();
        mMap.setMapPosition(52.580400947530364, 13.385594096047232, 1 << 17);
        // grid,labels,buildings,scalebar
        initLayers(false, true, true, true);
        //add position changed handler
        positionChangedHandler = MapViewPositionChangedHandler.getInstance
                (mMap, myLocationModel, myLocationAccuracy, mapOrientationButton);
        return mMap;
    }

    /**
     * Tears down the map and releases renderer and bucket pool resources.
     * NOTE(review): sets {@code main.drawMap = true} here — the same value
     * createMap() sets; it looks like it should be {@code false} when the map
     * is destroyed. Confirm against CacheboxMain.
     */
    public void destroyMap() {
        main.drawMap = true;
        mMap.clearMap();
        mMap.destroy();
        mMap = null;
        TextureBucket.pool.clear();
        TextItem.pool.clear();
        TextureItem.disposeTextures();
        main.mMapRenderer = null;
    }

    @Override
    protected void create() {
        // overide and don't call super
        // for non creation of default name label
    }

    /** Re-attaches the map input handlers when the view becomes visible. */
    @Override
    public void onShow() {
        addInputListener();
    }

    /** Destroys the map when the view is hidden to free GL resources. */
    @Override
    public void onHide() {
        destroyMap();
    }

    /**
     * Final cleanup of this view.
     * NOTE(review): {@code mapInputHandler} is created lazily and may still be
     * null here, which would NPE on clear() — confirm the call order.
     */
    @Override
    public void dispose() {
        log.debug("Dispose MapView");
        mapInputHandler.clear();
        mapInputHandler = null;
        mMap = null;
        mapStateButton.dispose();
    }

    /**
     * Propagates the new widget size to the map viewport and re-positions the
     * scale bar and the corner widgets.
     */
    @Override
    public void sizeChanged() {
        if (mMap == null) return;
        mMap.setSize((int) this.getWidth(), (int) this.getHeight());
        mMap.viewport().setScreenSize((int) this.getWidth(), (int) this.getHeight());
        main.setMapPosAndSize((int) this.getX(), (int) this.getY(), (int) this.getWidth(), (int) this.getHeight());
        // set position of MapScaleBar
        setMapScaleBarOffset(CB.scaledSizes.MARGIN, CB.scaledSizes.MARGIN_HALF);
        mapStateButton.setPosition(getWidth() - (mapStateButton.getWidth() + CB.scaledSizes.MARGIN),
                getHeight() - (mapStateButton.getHeight() + CB.scaledSizes.MARGIN));
        mapOrientationButton.setPosition(CB.scaledSizes.MARGIN,
                getHeight() - (mapOrientationButton.getHeight() + CB.scaledSizes.MARGIN));
        zoomButton.setPosition(getWidth() - (zoomButton.getWidth() + CB.scaledSizes.MARGIN), CB.scaledSizes.MARGIN);
    }

    /** Keeps the GL map viewport aligned with the widget's stage position. */
    @Override
    public void positionChanged() {
        main.setMapPosAndSize((int) this.getX(), (int) this.getY(), (int) this.getWidth(), (int) this.getHeight());
    }

    /**
     * Builds the map layer stack from a local mapsforge file.
     *
     * @param tileGrid    add a debug tile-grid layer.
     * @param labels      add the label layer.
     * @param buildings   add the 3D building layer.
     * @param mapScalebar add the scale bar plus the own-location layers.
     */
    protected void initLayers(boolean tileGrid, boolean labels,
                              boolean buildings, boolean mapScalebar) {
        // TileSource tileSource = new OSciMap4TileSource();
        tileSource = new MapFileTileSource();
        FileHandle mapFileHandle = Gdx.files.local(CB.WorkPath + "/repository/maps/germany.map");
        tileSource.setMapFile(mapFileHandle.path());
        tileSource.setPreferredLanguage("en");
        Layers layers = mMap.layers();
        //MyLocationLayer
        myLocationAccuracy = new LocationOverlay(mMap, new Compass() {
            @Override
            public void setEnabled(boolean enabled) {
            }
            @Override
            public float getRotation() {
                return myBearing;
            }
        });
        myLocationAccuracy.setPosition(52.580400947530364, 13.385594096047232, 100);
        Model model = CB.getSkin().get("MyLocationModel", Model.class);
        myLocationModel = new MyLocationModel(mMap, model);
        // NOTE(review): duplicate of the setPosition() call above — harmless
        // but probably a leftover; confirm.
        myLocationAccuracy.setPosition(52.580400947530364, 13.385594096047232, 100);
        if (tileSource != null) {
            VectorTileLayer mapLayer = mMap.setBaseMap(tileSource);
            mMap.setTheme(VtmThemes.DEFAULT);
            if (buildings)
                layers.add(new BuildingLayer(mMap, mapLayer));
            if (labels)
                layers.add(new LabelLayer(mMap, mapLayer));
        }
        if (tileGrid)
            layers.add(new TileGridLayer(mMap));
        if (mapScalebar) {
            DefaultMapScaleBar mapScaleBar = new DefaultMapScaleBar(mMap);
            mapScaleBar.setScaleBarMode(DefaultMapScaleBar.ScaleBarMode.BOTH);
            mapScaleBar.setDistanceUnitAdapter(MetricUnitAdapter.INSTANCE);
            mapScaleBar.setSecondaryDistanceUnitAdapter(ImperialUnitAdapter.INSTANCE);
            mapScaleBar.setScaleBarPosition(MapScaleBar.ScaleBarPosition.BOTTOM_LEFT);
            mapScaleBarLayer = new MapScaleBarLayer(mMap, mapScaleBar);
            layers.add(mapScaleBarLayer);
            layers.add(myLocationAccuracy);
            layers.add(myLocationModel);
        }
    }

    /**
     * Moves the scale bar renderer to the bottom-left corner with the given
     * pixel offsets. No-op when the scale bar layer was not created.
     *
     * @param xOffset horizontal offset in pixels.
     * @param yOffset vertical offset in pixels.
     */
    public void setMapScaleBarOffset(float xOffset, float yOffset) {
        if (mapScaleBarLayer == null) return;
        BitmapRenderer renderer = mapScaleBarLayer.getRenderer();
        renderer.setPosition(GLViewport.Position.BOTTOM_LEFT);
        renderer.setOffset(xOffset, yOffset);
    }

    /**
     * Externally toggles map input; enabling re-applies the state-dependent
     * rules of checkInputListener().
     *
     * @param on {@code true} to enable input, {@code false} to disable.
     */
    public void setInputListener(boolean on) {
        if (on) {
            checkInputListener();
        } else {
            removeInputListener();
        }
    }

    /**
     * Lazily builds the input multiplexer: motion first, then gestures, then
     * the map-specific handler.
     */
    private void createMapInputHandler() {
        GestureDetector gestureDetectore = new GestureDetector(new LayerHandler(mMap));
        MotionHandler motionHandler = new MotionHandler(mMap);
        MapInputHandler inputHandler = new MapInputHandler(mMap) {
            @Override
            public void rotateByUser() {
                // mapOrientationButton.setChecked(false);
            }
        };
        mapInputHandler = new InputMultiplexer();
        mapInputHandler.addProcessor(motionHandler);
        mapInputHandler.addProcessor(gestureDetectore);
        mapInputHandler.addProcessor(inputHandler);
    }

    /** Registers the (lazily created) multiplexer with the stage manager. */
    private void addInputListener() {
        if (mapInputHandler == null) createMapInputHandler();
        StageManager.addMapMultiplexer(mapInputHandler);
    }

    /** Detaches the multiplexer from the stage manager. */
    private void removeInputListener() {
        StageManager.removeMapMultiplexer(mapInputHandler);
    }
}
|
/*
 * Last updated on April 23, 2010, 11:40 AM
 *
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package ch.unizh.ini.jaer.projects.gesture.virtualdrummer;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.geom.Point2D.Float;
import javax.media.opengl.GL;
import java.awt.geom.Point2D;
import java.util.*;
import javax.media.opengl.GLAutoDrawable;
import net.sf.jaer.chip.*;
import net.sf.jaer.event.*;
import net.sf.jaer.eventprocessing.EventFilter2D;
import net.sf.jaer.graphics.FrameAnnotater;
/**
 * Finds clusters of events using spatio-temporal correlation between events.
 * Events occurring within the specified area (called a LIFNeuron) are considered strongly correlated.
 * How much the events are correlated is evaluated using a parameter called 'membranePotential'.
 * By thresholding the membranePotential of a neuron, firing neurons can be defined.
 * Then the clusters of events can be detected from the neuron groups.
 * The neuron group is a group of firing neurons which are linked to each other. (If two adjacent neurons are firing at the same time, they are linked).
 * (Notice) BlurringFilter2D is a cluster finder rather than a filter. It does NOT filter any events.
 *
 * @author Jun Haeng Lee
 */
public class BlurringFilter2D extends EventFilter2D implements FrameAnnotater, Observer {
    /**
     * Time constant of LIF neurons membrane potential. It decays exponentially unless a new event is added.
     */
    protected int MPTimeConstantUs = getPrefs().getInt("BlurringFilter2D.MPTimeConstantUs", 30000);
    /**
     * Life time of LIF neuron.
     * A neuron will be reset if there is no additional event within this value of micro seconds since the last update.
     */
    protected int neuronLifeTimeUs = getPrefs().getInt("BlurringFilter2D.neuronLifeTimeUs", 2000000);
    /**
     * threshold of membrane potential required for firing.
     */
    private int MPThreshold = getPrefs().getInt("BlurringFilter2D.MPThreshold", 15);
    /**
     * size of the receptive field of an LIF neuron.
     */
    protected int receptiveFieldSizePixels = getPrefs().getInt("BlurringFilter2D.receptiveFieldSizePixels", 8);
    /**
     * Membrane potential of a neuron jumps down by this amount after firing.
     */
    protected float MPJumpAfterFiring = getPrefs().getFloat("BlurringFilter2D.MPJumpAfterFiring", 10.0f);
    /**
     * if true, the receptive field of firing neurons are displayed on the screen.
     */
    private boolean showFiringNeurons = getPrefs().getBoolean("BlurringFilter2D.showFiringNeurons", false);
    /**
     * if true, the receptive field of firing neurons are displayed with filled squares.
     * if false, they are shown with hollow squares.
     */
    private boolean filledReceptiveField = getPrefs().getBoolean("BlurringFilter2D.filledReceptiveField", true);
    /**
     * shows neurons with firing type of FIRING_ON_BORDER only.
     */
    private boolean showBorderNeuronsOnly = getPrefs().getBoolean("BlurringFilter2D.showBorderNeuronsOnly", true);
    /**
     * shows neurons with firing type of FIRING_INSIDE only.
     */
    private boolean showInsideNeuronsOnly = getPrefs().getBoolean("BlurringFilter2D.showInsideNeuronsOnly", true);
    /**
     * color to draw the receptive field of firing neurons
     */
    private COLOR_CHOICE colorToDrawRF = COLOR_CHOICE.valueOf(getPrefs().get("BlurringFilter2D.colorToDrawRF", COLOR_CHOICE.orange.toString()));
    /**
     * names of color
     */
    public static enum COLOR_CHOICE {black, blue, cyan, darkgray, gray, green, lightgray, magenta, orange, pink, red, white, yellow};
    /**
     * A map containing a mapping from color names to color values
     */
    private final static HashMap<COLOR_CHOICE, Color> colors = new HashMap<COLOR_CHOICE, Color>();
    /**
     * The base set of colors; populated once at class-load time.
     */
    static {
        colors.put(COLOR_CHOICE.black, Color.black);
        colors.put(COLOR_CHOICE.blue, Color.blue);
        colors.put(COLOR_CHOICE.cyan, Color.cyan);
        colors.put(COLOR_CHOICE.darkgray, Color.darkGray);
        colors.put(COLOR_CHOICE.gray, Color.gray);
        colors.put(COLOR_CHOICE.green, Color.green);
        colors.put(COLOR_CHOICE.lightgray, Color.lightGray);
        colors.put(COLOR_CHOICE.magenta, Color.magenta);
        colors.put(COLOR_CHOICE.orange, Color.orange);
        colors.put(COLOR_CHOICE.pink, Color.pink);
        colors.put(COLOR_CHOICE.red, Color.red);
        colors.put(COLOR_CHOICE.white, Color.white);
        colors.put(COLOR_CHOICE.yellow, Color.yellow);
    }
    /**
     * RGBA components of the chosen color (4 floats, filled by getRGBComponents).
     */
    private float[] rgb = new float[4];
    /**
     * Constants to define neighbor neurons.
     * upper neighbor.
     */
    static int UPDATE_UP = 0x01;
    /**
     * Constants to define neighbor neurons.
     * lower neighbor.
     */
    static int UPDATE_DOWN = 0x02;
    /**
     * Constants to define neighbor neurons.
     * right neighbor.
     */
    static int UPDATE_RIGHT = 0x04;
    /**
     * Constants to define neighbor neurons.
     * left neighbor.
     */
    static int UPDATE_LEFT = 0x08;
    /**
     * DVS Chip
     */
    protected AEChip mychip;
    /**
     * number of neurons in x (column) directions.
     */
    protected int numOfNeuronsX = 0;
    /**
     * number of neurons in y (row) directions.
     */
    protected int numOfNeuronsY = 0;
    /**
     * array of neurons (numOfNeuronsX x numOfNeuronsY)
     */
    protected ArrayList<LIFNeuron> lifNeurons = new ArrayList<LIFNeuron>();
    /**
     * index of firing neurons
     */
    private HashSet<Integer> firingNeurons = new HashSet();
    /**
     * neuron groups found
     */
    private HashMap<Integer, NeuronGroup> neuronGroups = new HashMap<Integer, NeuronGroup>();
    /**
     * number of neuron groups found
     */
    protected int numOfGroup = 0;
    /**
     * last update time. It is the timestamp of the latest event.
     */
    protected int lastTime;
    /**
     * random number generator (used e.g. for per-neuron display hues)
     */
    protected Random random = new Random();
    /**
     * Constructor of BlurringFilter2D: initializes the filter, resolves the
     * display color, registers this object as observer of both the chip and
     * itself, and installs the property tooltips.
     *
     * @param chip the DVS chip this filter processes events from.
     */
    public BlurringFilter2D(AEChip chip) {
        super(chip);
        this.mychip = chip;
        // initializes filter
        initFilter();
        colors.get(colorToDrawRF).getRGBComponents(rgb);
        // adds this class as an observer
        chip.addObserver(this);
        addObserver(this);
        // adds tooltips
        final String lif_neuron = "LIF Neuron", disp = "Display";
        setPropertyTooltip(lif_neuron, "MPTimeConstantUs", "Time constant of LIF neurons membrane potential. It decays exponetially unless a new event is added.");
        setPropertyTooltip(lif_neuron, "neuronLifeTimeUs", "A neuron will be reset if there is no additional event within this value of micro seconds since the last update.");
        setPropertyTooltip(lif_neuron, "MPThreshold", "threshold of membrane potetial required for firing.");
        setPropertyTooltip(lif_neuron, "MPJumpAfterFiring", "Membrane potential of a neuron jumps down by this amount after firing.");
        setPropertyTooltip(lif_neuron, "receptiveFieldSizePixels", "size of the receptive field of an LIF neuron.");
        setPropertyTooltip(disp, "showFiringNeurons", "if true, the receptive field of firing neurons are displayed on the screen.");
        setPropertyTooltip(disp, "filledReceptiveField", "if true, the receptive field of firing neurons are displayed with filled sqaures. Otherwise, they are shown with hallow squares.");
        setPropertyTooltip(disp, "showBorderNeuronsOnly", "shows neurons with firing type of FIRING_ON_BORDER only.");
        setPropertyTooltip(disp, "showInsideNeuronsOnly", "shows neurons with firing type of FIRING_INSIDE only.");
        setPropertyTooltip(disp, "colorToDrawRF", "color to draw the receptive field of firing neurons");
    }
@Override
public String toString() {
String s = lifNeurons != null ? Integer.toString(numOfNeuronsX).concat(" by ").concat(Integer.toString(numOfNeuronsY)) : null;
String s2 = "BlurringFilter2D with " + s + " neurons ";
return s2;
}
    /**
     * Observer callback. Update messages fired by this filter itself trigger
     * a neuron update for the message's timestamp; notifications from the
     * chip (e.g. size changes) re-run initFilter().
     *
     * @param o the observable that fired.
     * @param arg the payload; an UpdateMessage when {@code o == this}.
     */
    public void update(Observable o, Object arg) {
        if (o == this) {
            UpdateMessage msg = (UpdateMessage) arg;
            updateNeurons(msg.timestamp); // at least once per packet update list
        } else if (o instanceof AEChip) {
            initFilter();
        }
    }
    /**
     * Definition of location types of LIF neurons.
     * CORNER_* : neurons that are located in corners.
     * EDGE_* : neurons that are located in edges.
     * INSIDE: all neurons except corner and edge neurons.
     */
    public static enum LocationType {
        CORNER_00, CORNER_01, CORNER_10, CORNER_11, EDGE_0Y, EDGE_1Y, EDGE_X0, EDGE_X1, INSIDE
    }
    /**
     * Definition of firing types of neurons.
     */
    public static enum FiringType {
        /**
         * does not fire due to the low membrane potential
         */
        SILENT,
        /**
         * fires alone. Its neighbor neurons don't fire.
         */
        FIRING_ISOLATED,
        /**
         * fires together with at least one of its neighbors
         */
        FIRING_WITH_NEIGHBOR,
        /**
         * firing neuron which makes the boundary of a group of simultaneously firing neurons
         */
        FIRING_ON_BORDER,
        /**
         * non-border firing neuron which belongs to a group of simultaneously firing neurons
         */
        FIRING_INSIDE;
    }
    /**
     * Firing type update type: controls how setFiringTypeToBorder() treats
     * neurons that are already FIRING_INSIDE.
     */
    static enum FiringTypeUpdate {
        /**
         * updates forcibly
         */
        FORCED,
        /**
         * updates if necessary based on the current condition
         */
        CHECK;
    }
    /**
     * Definition of leaky integrate and fire (LIF) neuron.
     * The receptive field is a partial area of events-occurring space.
     * Events within the receptive field of a neuron are considered strongly correlated.
     * Spacing of the receptive field of two adjacent LIF neurons is decided to the half of side length of the receptive field to increase the spatial resolution.
     * Thus, each neuron shares half area of the receptive field with its neighbor.
     */
    public class LIFNeuron {
        /**
         * Neuron index in (x_index, y_index)
         */
        public Point2D.Float index = new Point2D.Float();
        /**
         * spatial location of a neuron in chip pixels
         */
        public Point2D.Float location = new Point2D.Float();
        /**
         * location type of a neuron. One of {CORNER_00, CORNER_01, CORNER_10, CORNER_11, EDGE_0Y, EDGE_1Y, EDGE_X0, EDGE_X1, INSIDE}
         */
        LocationType locationType;
        /**
         * firing type of a neuron. One of {SILENT, FIRING_ISOLATED, FIRING_WITH_NEIGHBOR, FIRING_ON_BORDER, FIRING_INSIDE}
         */
        FiringType firingType;
        /**
         * Tag to identify the group which the neuron belongs to. -1 means "no group".
         */
        protected int groupTag = -1;
        /**
         * true if the neuron fired a spike.
         */
        protected boolean fired = false;
        /** The "membranePotential" of the neuron.
         * The membranePotential decays over time (i.e., leaky) and is incremented by one by each collected event.
         * The membranePotential decays with a first order time constant of MPTimeConstantUs in us.
         * The membranePotential decreases by the amount of MPJumpAfterFiring after firing an event.
         */
        protected float membranePotential = 0;
        /**
         * Center of membrane potential (event-weighted centroid in pixels).
         */
        protected Point2D.Float centerMP = new Point2D.Float();
        /**
         * number of firing neighbors
         */
        protected int numFiringNeighbors = 0;
        /**
         * This is the last in timestamp ticks that the neuron was updated, by an event
         */
        protected int lastEventTimestamp;
        /**
         * defined as index.x + index.y * numOfNeuronsX
         */
        private int cellNumber;
        /**
         * color to display the neuron's receptive field.
         */
        // protected Color color = null;
        /**
         * Construct an LIF neuron with index.
         *
         * @param indexX the column index of the neuron.
         * @param indexY the row index of the neuron.
         */
        public LIFNeuron(int indexX, int indexY) {
            float hue = random.nextFloat();
            // Color c = Color.getHSBColor(hue, 1f, 1f);
            if (indexX < 0 || indexY < 0 || indexX >= numOfNeuronsX || indexY >= numOfNeuronsY) {
                // NOTE(review): out-of-range indices are silently accepted —
                // this branch was left empty in the original; confirm intended.
            }
            // sets invariable parameters
            index.x = (float) indexX;
            index.y = (float) indexY;
            // neurons are spaced half a receptive field apart (overlapping RFs)
            location.x = (index.x + 1) * receptiveFieldSizePixels / 2;
            location.y = (index.y + 1) * receptiveFieldSizePixels / 2;
            cellNumber = (int) index.x + (int) index.y * numOfNeuronsX;
            // resets initially variable parameters
            reset();
        }
        /**
         * Resets a neuron with initial values: silent, no group, zero
         * potential, centroid back at the geometric location.
         */
        public void reset() {
            setFiringType(FiringType.SILENT);
            resetGroupTag();
            fired = false;
            membranePotential = 0;
            centerMP.x = location.x;
            centerMP.y = location.y;
            numFiringNeighbors = 0;
            lastEventTimestamp = 0;
        }
        @Override
        public int hashCode() {
            // cellNumber is unique per grid position, so it serves as the hash
            return cellNumber;
        }
        @Override
        public boolean equals(Object obj) {
            if (this == obj) {
                return true;
            }
            if ((obj == null) || (obj.getClass() != this.getClass())) {
                return false;
            }
            LIFNeuron test = (LIFNeuron) obj;
            return cellNumber == test.cellNumber;
        }
        /** Draws the neuron using OpenGL.
         *
         * @param drawable area to drawReceptiveField this.
         */
        public void drawReceptiveField(GLAutoDrawable drawable) {
            final float BOX_LINE_WIDTH = 2f; // in chip
            GL gl = drawable.getGL();
            // set color and line width
            gl.glColor3fv(rgb, 0);
            gl.glLineWidth(BOX_LINE_WIDTH);
            // draws the receptive field of a neuron
            gl.glPushMatrix();
            gl.glTranslatef((int) getLocation().x, (int) getLocation().y, 0);
            if (filledReceptiveField) {
                gl.glBegin(GL.GL_QUADS);
            } else {
                gl.glBegin(GL.GL_LINE_LOOP);
            }
            int halfSize = (int) receptiveFieldSizePixels / 2;
            gl.glVertex2i(-halfSize, -halfSize);
            gl.glVertex2i(+halfSize, -halfSize);
            gl.glVertex2i(+halfSize, +halfSize);
            gl.glVertex2i(-halfSize, +halfSize);
            gl.glEnd();
            gl.glPopMatrix();
        }
        /**
         * returns firing type
         *
         * @return the current firing type.
         */
        private FiringType getFiringType() {
            return firingType;
        }
        /**
         * sets firing type
         *
         * @param firingType the new firing type.
         */
        private void setFiringType(FiringType firingType) {
            this.firingType = firingType;
        }
        /**
         * sets the firing type of the neuron to FIRING_ON_BORDER.
         * If neuronFiringTypeUpdateType is FiringTypeUpdate.CHECK, an inside neuron cannot be a border neuron.
         *
         * @param groupTag the group the neuron should be tagged with.
         * @param neuronFiringTypeUpdateType FORCED or CHECK.
         */
        private void setFiringTypeToBorder(int groupTag, FiringTypeUpdate neuronFiringTypeUpdateType) {
            if (neuronFiringTypeUpdateType == FiringTypeUpdate.CHECK) {
                if (this.firingType != FiringType.FIRING_INSIDE) {
                    setFiringType(FiringType.FIRING_ON_BORDER);
                }
            } else {
                setFiringType(FiringType.FIRING_ON_BORDER);
            }
            setGroupTag(groupTag);
        }
        /**
         * updates a neuron with an additional event: bumps the membrane
         * potential and shifts the weighted centroid toward the event.
         *
         * @param event the incoming event.
         * @param weight contribution of this event to the membrane potential.
         */
        public void addEvent(BasicEvent event,float weight) {
            incrementMP(event.getTimestamp(), weight);
            // NOTE(review): divides by membranePotential — assumes it is
            // non-zero after incrementMP (i.e. weight > 0); confirm callers.
            centerMP.x = (centerMP.x*(membranePotential-weight) + event.x*weight)/membranePotential;
            centerMP.y = (centerMP.y*(membranePotential-weight) + event.y*weight)/membranePotential;
            lastEventTimestamp = event.getTimestamp();
        }
        /**
         * Computes and returns {@link #membranePotential} at time t, using the last time an event hit this neuron
         * and the {@link #MPTimeConstantUs}. Does not change the membranePotential itself.
         *
         * @param t timestamp now.
         * @return the membranePotential.
         */
        protected float getMPNow(int t) {
            float m = membranePotential * (float) Math.exp(((float) (lastEventTimestamp - t)) / MPTimeConstantUs);
            return m;
        }
        /**
         * returns the membranePotential without considering the current time.
         *
         * @return membranePotential
         */
        protected float getMP() {
            return membranePotential;
        }
        /**
         * Increments membranePotential of the neuron by amount of weight after decaying it away since the {@link #lastEventTimestamp} according
         * to exponential decay with time constant {@link #MPTimeConstantUs}.
         *
         * @param timeStamp timestamp of the incoming event.
         * @param weight amount to add after the decay is applied.
         */
        protected void incrementMP(int timeStamp, float weight) {
            membranePotential = weight + membranePotential * (float) Math.exp(((float) lastEventTimestamp - timeStamp) / MPTimeConstantUs);
        }
        /**
         * returns the neuron's location in pixels.
         *
         * @return the fixed pixel location of this neuron.
         */
        final public Point2D.Float getLocation() {
            return location;
        }
        /**
         * returns the neuron's center of membranePotential.
         *
         * @return the event-weighted centroid.
         */
        final public Point2D.Float getCenterMP() {
            return centerMP;
        }
        /**
         * returns true if the neuron fired a spike.
         * Otherwise, returns false.
         *
         * @return the fired flag.
         */
        final public boolean isFired() {
            return fired;
        }
        /**
         * checks if the neuron's membrane potential is above the threshold
         * (evaluated at the global lastTime) and updates fired/firingType
         * accordingly; also clears the group tag as a side effect.
         *
         * @return true when the neuron fires.
         */
        public boolean isAboveThreshold() {
            if (getMPNow(lastTime) < MPThreshold){
                fired = false;
                firingType = FiringType.SILENT;
            }else{
                // fires a spike
                fired = true;
                firingType = FiringType.FIRING_ISOLATED;
                // decreases MP by MPJumpAfterFiring after firing
                membranePotential -= MPJumpAfterFiring;
            }
            resetGroupTag();
            return fired;
        }
        @Override
        public String toString() {
            return String.format("LIF Neuron index=(%d, %d), location = (%d, %d), membrane potential = %.2f",
                (int) index.x, (int) index.y,
                (int) location.x, (int) location.y,
                membranePotential);
        }
        /**
         * returns index
         *
         * @return the (x, y) grid index of this neuron.
         */
        public Float getIndex() {
            return index;
        }
        /**
         * returns location type
         *
         * @return one of the LocationType constants.
         */
        private LocationType getLocationType() {
            return locationType;
        }
        /**
         * sets location type
         *
         * @param locationType the location classification of this neuron.
         */
        private void setLocationType(LocationType locationType) {
            this.locationType = locationType;
        }
        /**
         * returns the number of simultaneously firing neighbors
         *
         * @return the neighbor count.
         */
        public int getNumFiringNeighbors() {
            return numFiringNeighbors;
        }
        /**
         * sets the number of simultaneously firing neighbors
         *
         * @param numFiringNeighbors the new neighbor count.
         */
        public void setNumFiringNeighbors(int numFiringNeighbors) {
            this.numFiringNeighbors = numFiringNeighbors;
        }
        /**
         * increases the number of firing neighbors by one
         *
         */
        public void increaseNumFiringNeighbors() {
            numFiringNeighbors++;
        }
        /**
         * returns the cell number of a neuron
         *
         * @return cell number
         */
        public int getCellNumber() {
            return cellNumber;
        }
        /**
         * returns the group tag
         *
         * @return group tag
         */
        public int getGroupTag() {
            return groupTag;
        }
        /**
         * sets the group tag.
         * Passing a negative value allocates a brand-new tag (from the outer
         * class's numOfGroup counter) if the neuron is not tagged yet.
         *
         * @param groupTag the tag to set, or a negative value to auto-assign.
         */
        public void setGroupTag(int groupTag) {
            // If groupTag is a negative value, give a new group tag
            if (groupTag < 0) {
                if (this.groupTag < 0) {
                    this.groupTag = numOfGroup;
                    numOfGroup++;
                }
            } else {
                this.groupTag = groupTag;
            }
        }
        /**
         * resets group tag to the "no group" marker (-1)
         *
         */
        public void resetGroupTag() {
            this.groupTag = -1;
        }
        /**
         * returns the last event timestamp
         *
         * @return timestamp of the last event collected by the neuron
         */
        public int getLastEventTimestamp() {
            return lastEventTimestamp;
        }
        /**
         * sets the last event timestamp
         *
         * @param lastEventTimestamp the new timestamp value.
         */
        public void setLastEventTimestamp(int lastEventTimestamp) {
            this.lastEventTimestamp = lastEventTimestamp;
        }
    } // End of class LIFNeuron
/** Definition of NeuronGroup
* NeuronGroup is a group of simultaneously firing neurons which are linked each other.
* Any two neighboring neurons are called linked if they are firing simultaneously.
* Each member neuron within the NeuronGroup has its FiringType which is one of {FIRING_ON_BORDER, FIRING_INSIDE}.
* Member neurons with FIRING_ON_BORDER are the border neurons making the boundary of the group.
* All member neurons except the border neurons should have FIRING_INSIDE type.
* NeuronGroups are utilized as a basis for finding clusters.
*/
public class NeuronGroup {
/**
* location of the group in chip pixels.
* Center of member neurons location weighted by their membranePotential.
*/
public Point2D.Float location = new Point2D.Float();
/**
* Sum of the membranePotential of all member neurons.
*/
protected float totalMP;
/**
* This is the last time in timestamp ticks that the group was updated by an event.
* The largest one among the lastUpdateTime of all member neurons becomes groups's lastEventTimestamp.
*/
protected int lastEventTimestamp;
/** Parameters to represent the area of the group.
* minX(Y) : minimum X(Y) among the locations of member neurons
* maxX(Y) : maximum X(Y) among the locations of member neurons
*/
protected float minX, maxX, minY, maxY;
/**
*Group number (index)
*/
protected int tag;
/**
*Indicates if this group is hitting edge
*/
protected boolean hitEdge = false;
/**
* used in tracker
* When a tracked cluster registered this group as its next cluster, it sets this value true.
* Then, other clusters cannot consider this group as its next one.
*/
protected boolean matched = false;
/**
* Member neurons consisting of this group
*/
HashSet<LIFNeuron> memberNeurons = null;
/**
* Constructor of Neurongroup
*/
public NeuronGroup() {
memberNeurons = new HashSet();
reset();
}
/**
* constructor with the first member
*
* @param firstNeuron
*/
public NeuronGroup(LIFNeuron firstNeuron) {
this();
add(firstNeuron);
}
/**
* resets the neuron group
*
*/
public void reset() {
location.setLocation(-1f, -1f);
totalMP = 0;
tag = -1;
memberNeurons.clear();
maxX = maxY = 0;
minX = chip.getSizeX();
minY = chip.getSizeX();
hitEdge = false;
matched = false;
}
/**
* adds a neuron into the group
* @param newNeuron
*/
public void add(LIFNeuron newNeuron) {
// if this is the first one
if (tag < 0) {
tag = newNeuron.getGroupTag();
lastEventTimestamp = newNeuron.getLastEventTimestamp();
location.x = newNeuron.centerMP.x;
location.y = newNeuron.centerMP.y;
totalMP = newNeuron.getMP();
} else { // if this is not the first one
float prevMP = totalMP;
float leakyFactor;
if (lastEventTimestamp < newNeuron.getLastEventTimestamp()) {
leakyFactor = (float) Math.exp(((float) lastEventTimestamp - newNeuron.getLastEventTimestamp()) / MPTimeConstantUs);
totalMP = newNeuron.getMP() + totalMP * leakyFactor;
location.x = (newNeuron.getCenterMP().x * newNeuron.getMP() + location.x * prevMP * leakyFactor) / (totalMP);
location.y = (newNeuron.getCenterMP().y * newNeuron.getMP() + location.y * prevMP * leakyFactor) / (totalMP);
lastEventTimestamp = newNeuron.getLastEventTimestamp();
} else {
leakyFactor = (float) Math.exp(((float) newNeuron.getLastEventTimestamp() - lastEventTimestamp) / MPTimeConstantUs);
totalMP += newNeuron.getMP() * leakyFactor;
location.x = (newNeuron.getCenterMP().x * newNeuron.getMP() * leakyFactor + location.x * prevMP) / (totalMP);
location.y = (newNeuron.getCenterMP().y * newNeuron.getMP() * leakyFactor + location.y * prevMP) / (totalMP);
}
}
// updates boundary of the group
if (newNeuron.getLocation().x < minX) {
minX = newNeuron.getLocation().x;
}
if (newNeuron.getLocation().x > maxX) {
maxX = newNeuron.getLocation().x;
}
if (newNeuron.getLocation().y < minY) {
minY = newNeuron.getLocation().y;
}
if (newNeuron.getLocation().y > maxY) {
maxY = newNeuron.getLocation().y;
}
// check if this group is hitting edges
if (!hitEdge && ((int) newNeuron.getIndex().x == 0 || (int) newNeuron.getIndex().y == 0 || (int) newNeuron.getIndex().x == numOfNeuronsX - 1 || (int) newNeuron.getIndex().y == numOfNeuronsY - 1)) {
hitEdge = true;
}
memberNeurons.add(newNeuron);
}
/**
* merges two groups
*
* @param targetGroup
*/
public void merge(NeuronGroup targetGroup) {
if (targetGroup == null) {
return;
}
float prevMP = totalMP;
float leakyFactor;
if (lastEventTimestamp < targetGroup.lastEventTimestamp) {
leakyFactor = (float) Math.exp(((float) lastEventTimestamp - targetGroup.lastEventTimestamp) / MPTimeConstantUs);
totalMP = targetGroup.totalMP + totalMP * leakyFactor;
location.x = (targetGroup.location.x * targetGroup.totalMP + location.x * prevMP * leakyFactor) / (totalMP);
location.y = (targetGroup.location.y * targetGroup.totalMP + location.y * prevMP * leakyFactor) / (totalMP);
lastEventTimestamp = targetGroup.lastEventTimestamp;
} else {
leakyFactor = (float) Math.exp(((float) targetGroup.lastEventTimestamp - lastEventTimestamp) / MPTimeConstantUs);
totalMP += (targetGroup.totalMP * leakyFactor);
location.x = (targetGroup.location.x * targetGroup.totalMP * leakyFactor + location.x * prevMP) / (totalMP);
location.y = (targetGroup.location.y * targetGroup.totalMP * leakyFactor + location.y * prevMP) / (totalMP);
}
if (targetGroup.minX < minX) {
minX = targetGroup.minX;
}
if (targetGroup.maxX > maxX) {
maxX = targetGroup.maxX;
}
if (targetGroup.minY < minY) {
minY = targetGroup.minY;
}
if (targetGroup.maxY > maxY) {
maxY = targetGroup.maxY;
}
for(LIFNeuron tmpNeuron : targetGroup.getMemberNeurons()) {
tmpNeuron.setGroupTag(tag);
memberNeurons.add(tmpNeuron);
}
targetGroup.reset();
}
/**
* calculates the distance between two groups in pixels
*
* @param targetGroup
* @return
*/
public float locationDistancePixels(NeuronGroup targetGroup) {
return (float) Math.sqrt(Math.pow(location.x - targetGroup.location.x, 2.0) + Math.pow(location.y - targetGroup.location.y, 2.0));
}
/**
* returns memberNeurons
*
* @return
*/
public HashSet<LIFNeuron> getMemberNeurons() {
return memberNeurons;
}
/**
* returns the number of member neurons
*
* @return
*/
public int getNumMemberNeurons() {
return memberNeurons.size();
}
/**
* returns the group membranePotential.
* Time constant is not necessary.
*
* @return
*/
public float getTotalMP() {
return totalMP;
}
/**
* returns the last event timestamp of the group.
*
* @return
*/
public int getLastEventTimestamp() {
return lastEventTimestamp;
}
/**
* returns the location of the group.
*
* @return
*/
public Float getLocation() {
return location;
}
/**
* returns the inner radius of the group.
*
* @return
*/
public float getInnerRadiusPixels() {
return Math.min(Math.min(Math.abs(location.x - minX), Math.abs(location.x - maxX)), Math.min(Math.abs(location.y - minY), Math.abs(location.y - maxY)));
}
/**
* returns the outter radius of the group.
*
* @return
*/
public float getOutterRadiusPixels() {
return Math.max(Math.max(Math.abs(location.x - minX), Math.abs(location.x - maxX)), Math.max(Math.abs(location.y - minY), Math.abs(location.y - maxY)));
}
/**
* returns dimension the group
*
* @return
*/
public Dimension getDimension(){
Dimension ret = new Dimension();
ret.width = (int) (maxX - minX);
ret.height = (int) (maxY - minY);
return ret;
}
/**
* returns the raidus of the group area by assuming that the shape of the group is a square.
*
* @return
*/
public float getAreaRadiusPixels() {
return (float) Math.sqrt((float) getNumMemberNeurons()) * receptiveFieldSizePixels / 4;
}
/**
* checks if the targer location is within the inner radius of the group.
*
* @param targetLoc
* @return
*/
public boolean isWithinInnerRadius(Float targetLoc) {
boolean ret = false;
float innerRaidus = getInnerRadiusPixels();
if (Math.abs(location.x - targetLoc.x) <= innerRaidus && Math.abs(location.y - targetLoc.y) <= innerRaidus) {
ret = true;
}
return ret;
}
/**
* checks if the targer location is within the outter radius of the group.
*
* @param targetLoc
* @return
*/
public boolean isWithinOuterRadius(Float targetLoc) {
boolean ret = false;
float outterRaidus = getOutterRadiusPixels();
if (Math.abs(location.x - targetLoc.x) <= outterRaidus && Math.abs(location.y - targetLoc.y) <= outterRaidus) {
ret = true;
}
return ret;
}
/**
* checks if the targer location is within the area radius of the group.
*
* @param targetLoc
* @return
*/
public boolean isWithinAreaRadius(Float targetLoc) {
boolean ret = false;
float areaRaidus = getAreaRadiusPixels();
if (Math.abs(location.x - targetLoc.x) <= areaRaidus && Math.abs(location.y - targetLoc.y) <= areaRaidus) {
ret = true;
}
return ret;
}
/**
* checks if the group contains the given event.
* It checks the location of the events
* @param ev
* @return
*/
public boolean contains(BasicEvent ev) {
boolean ret = false;
int subIndexX = (int) 2 * ev.getX() / receptiveFieldSizePixels;
int subIndexY = (int) 2 * ev.getY() / receptiveFieldSizePixels;
if (subIndexX >= numOfNeuronsX && subIndexY >= numOfNeuronsY) {
ret = false;
}
if (!ret && subIndexX != numOfNeuronsX && subIndexY != numOfNeuronsY) {
ret = firingNeurons.contains(subIndexX + subIndexY * numOfNeuronsX);
}
if (!ret && subIndexX != numOfNeuronsX && subIndexY != 0) {
ret = firingNeurons.contains(subIndexX + (subIndexY - 1) * numOfNeuronsX);
}
if (!ret && subIndexX != 0 && subIndexY != numOfNeuronsY) {
ret = firingNeurons.contains(subIndexX - 1 + subIndexY * numOfNeuronsX);
}
if (!ret && subIndexY != 0 && subIndexX != 0) {
ret = firingNeurons.contains(subIndexX - 1 + (subIndexY - 1) * numOfNeuronsX);
}
return ret;
}
/**
* returns true if the group contains neurons which locate on edges or corners.
*
* @return
*/
public boolean isHitEdge() {
return hitEdge;
}
/**
* Returns true if the group is matched to a cluster.
* This is used in a tracker module.
*
* @return
*/
public boolean isMatched() {
return matched;
}
/**
* Sets true if the group is matched to a cluster
* So, other cluster cannot take this group as a cluster
* @param matched
*/
public void setMatched(boolean matched) {
this.matched = matched;
}
} // End of class NeuronGroup
/**
* Processes the incoming events to have blurring filter output after first running the blurring to update the neurons.
*
* @param in the input packet.
* @return the packet after filtering by the enclosed FilterChain.
*/
@Override
synchronized public EventPacket<?> filterPacket (EventPacket<?> in){
out = in;
if ( in == null ){
return out;
}
if ( getEnclosedFilterChain() != null ){
out = getEnclosedFilterChain().filterPacket(in);
}
blurring(out);
return out;
}
/** Allocate the incoming events into the neurons
*
* @param in the input packet of BasicEvent
* @return the original input packet
*/
synchronized protected EventPacket<?> blurring(EventPacket<?> in) {
boolean updatedNeurons = false;
if(in == null)
return in;
if (in.getSize() == 0) {
return in;
}
try {
// add events to the corresponding neuron
for(int i=0; i<in.getSize(); i++){
BasicEvent ev = in.getEvent(i);
// don't reset on nonmonotonic, rather reset on rewind, which happens automatically
// if(ev.timestamp < lastTime){
// resetFilter();
int subIndexX = (int) (2.0f * ev.getX() / receptiveFieldSizePixels);
int subIndexY = (int) (2.0f * ev.getY() / receptiveFieldSizePixels);
if (subIndexX >= numOfNeuronsX && subIndexY >= numOfNeuronsY) {
initFilter();
}
if (subIndexX != numOfNeuronsX && subIndexY != numOfNeuronsY) {
lifNeurons.get(subIndexX + subIndexY * numOfNeuronsX).addEvent(ev, 1.0f);
}
if (subIndexX != numOfNeuronsX && subIndexY != 0) {
lifNeurons.get(subIndexX + (subIndexY - 1) * numOfNeuronsX).addEvent(ev, 1.0f);
}
if (subIndexX != 0 && subIndexY != numOfNeuronsY) {
lifNeurons.get(subIndexX - 1 + subIndexY * numOfNeuronsX).addEvent(ev, 1.0f);
}
if (subIndexY != 0 && subIndexX != 0) {
lifNeurons.get(subIndexX - 1 + (subIndexY - 1) * numOfNeuronsX).addEvent(ev, 1.0f);
}
lastTime = ev.getTimestamp();
updatedNeurons = maybeCallUpdateObservers(in, lastTime);
}
} catch (IndexOutOfBoundsException e) {
initFilter();
log.warning(e.getMessage());
}
if (!updatedNeurons) {
updateNeurons(lastTime); // at laest once per packet update list
callUpdateObservers(in, lastTime);
}
return in;
}
    /**
     * Updates all neurons at time t.
     *
     * For each neuron this pass:
     *  - resets the neuron if it has received no event for longer than neuronLifeTimeUs,
     *  - checks if the neuron is firing,
     *  - counts its simultaneously firing neighbors,
     *  - assigns the neuron to a group and sets its firing type based on the results.
     *
     * Neurons are scanned in cellNumber order (row-major, bottom row first). Up/right
     * neighbors are probed with isAboveThreshold(), which evaluates and fires them;
     * down/left neighbors are read with isFired(), i.e. the result of their own earlier
     * evaluation in this pass.
     * NOTE(review): in CORNER_01, CORNER_11 and EDGE_X1 the right/left neighbors lie in
     * the same row and have not yet been re-evaluated this pass, so isFired() returns
     * their state from the previous pass — confirm this is intended.
     *
     * @param t current update time in us
     */
    synchronized protected void updateNeurons(int t) {
        // makes the list of firing neurons and neuron groups empty before update
        firingNeurons.clear();
        neuronGroups.clear();
        // resets number of group before starting update
        numOfGroup = 0;
        if (!lifNeurons.isEmpty()) {
            int timeSinceSupport;
            LIFNeuron upNeuron, downNeuron, leftNeuron, rightNeuron;
            for(int i=0; i<lifNeurons.size(); i++){
                LIFNeuron tmpNeuron = lifNeurons.get(i);
                try {
                    // reset stale neurons
                    timeSinceSupport = t - tmpNeuron.lastEventTimestamp;
                    if (timeSinceSupport > neuronLifeTimeUs) {
                        tmpNeuron.reset();
                    }
                    int indexX = (int) tmpNeuron.getIndex().x;
                    int indexY = (int) tmpNeuron.getIndex().y;
                    tmpNeuron.setNumFiringNeighbors(0);
                    // each case handles one grid position class; the neighbor set differs per case
                    switch (tmpNeuron.getLocationType()) {
                        case CORNER_00: // bottom-left corner: neighbors are up and right
                            upNeuron = lifNeurons.get( indexX + (indexY + 1) * numOfNeuronsX);
                            rightNeuron = lifNeurons.get(indexX + 1 + indexY * numOfNeuronsX);
                            // check threshold of the first neuron
                            tmpNeuron.isAboveThreshold();
                            if (upNeuron.isAboveThreshold()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (rightNeuron.isAboveThreshold()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            // all neighbors firing: this neuron is inside a group, neighbors form the border
                            if (tmpNeuron.getNumFiringNeighbors() == 2) {
                                tmpNeuron.setGroupTag(-1);
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                upNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                rightNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                updateGroup(tmpNeuron, UPDATE_UP | UPDATE_RIGHT);
                            }
                            break;
                        case CORNER_01: // top-left corner: neighbors are down and right
                            downNeuron = lifNeurons.get(indexX + (indexY - 1) * numOfNeuronsX);
                            rightNeuron = lifNeurons.get(indexX + 1 + indexY * numOfNeuronsX);
                            if (downNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (rightNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            if (tmpNeuron.getNumFiringNeighbors() == 2) {
                                // inherit the shared tag if both neighbors agree, else request a new one
                                if (rightNeuron.getGroupTag() == downNeuron.getGroupTag()) {
                                    tmpNeuron.setGroupTag(downNeuron.getGroupTag());
                                } else {
                                    tmpNeuron.setGroupTag(-1);
                                }
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                rightNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                downNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                updateGroup(tmpNeuron, UPDATE_DOWN | UPDATE_RIGHT);
                            }
                            break;
                        case CORNER_10: // bottom-right corner: neighbors are up and left
                            upNeuron = lifNeurons.get( indexX + (indexY + 1) * numOfNeuronsX);
                            leftNeuron = lifNeurons.get(indexX - 1 + indexY * numOfNeuronsX);
                            if (upNeuron.isAboveThreshold()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (leftNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            if (tmpNeuron.getNumFiringNeighbors() == 2) {
                                tmpNeuron.setGroupTag(-1);
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                upNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                leftNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                updateGroup(tmpNeuron, UPDATE_UP | UPDATE_LEFT);
                            }
                            break;
                        case CORNER_11: // top-right corner: neighbors are down and left
                            downNeuron = lifNeurons.get(indexX + (indexY - 1) * numOfNeuronsX);
                            leftNeuron = lifNeurons.get(indexX - 1 + indexY * numOfNeuronsX);
                            if (downNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (leftNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            if (tmpNeuron.getNumFiringNeighbors() == 2) {
                                if (leftNeuron.getGroupTag() == downNeuron.getGroupTag()) {
                                    tmpNeuron.setGroupTag(downNeuron.getGroupTag());
                                } else {
                                    // neighbors belong to two distinct groups: merge them under the smaller tag
                                    if (leftNeuron.getGroupTag() > 0 && downNeuron.getGroupTag() > 0) {
                                        tmpNeuron.setGroupTag(Math.min(downNeuron.getGroupTag(), leftNeuron.getGroupTag()));
                                        // do merge here
                                        int targetGroupTag = Math.max(downNeuron.getGroupTag(), leftNeuron.getGroupTag());
                                        neuronGroups.get(tmpNeuron.getGroupTag()).merge(neuronGroups.get(targetGroupTag));
                                        neuronGroups.remove(targetGroupTag);
                                    } else if (leftNeuron.getGroupTag() < 0 && downNeuron.getGroupTag() < 0) {
                                        tmpNeuron.setGroupTag(-1);
                                    } else {
                                        tmpNeuron.setGroupTag(Math.max(downNeuron.getGroupTag(), leftNeuron.getGroupTag()));
                                    }
                                }
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                downNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                leftNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                updateGroup(tmpNeuron, UPDATE_DOWN | UPDATE_LEFT);
                            }
                            break;
                        case EDGE_0Y: // left edge: neighbors are up, down and right
                            upNeuron = lifNeurons.get( indexX + (indexY + 1) * numOfNeuronsX);
                            downNeuron = lifNeurons.get(indexX + (indexY - 1) * numOfNeuronsX);
                            rightNeuron = lifNeurons.get(indexX + 1 + indexY * numOfNeuronsX);
                            if (upNeuron.isAboveThreshold()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (downNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (rightNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            if (tmpNeuron.getNumFiringNeighbors() == 3) {
                                if (rightNeuron.getGroupTag() == downNeuron.getGroupTag()) {
                                    tmpNeuron.setGroupTag(downNeuron.getGroupTag());
                                } else {
                                    tmpNeuron.setGroupTag(-1);
                                }
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                upNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                downNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                rightNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                updateGroup(tmpNeuron, UPDATE_UP | UPDATE_DOWN | UPDATE_RIGHT);
                            }
                            break;
                        case EDGE_1Y: // right edge: neighbors are up, down and left
                            upNeuron = lifNeurons.get( indexX + (indexY + 1) * numOfNeuronsX);
                            downNeuron = lifNeurons.get(indexX + (indexY - 1) * numOfNeuronsX);
                            leftNeuron = lifNeurons.get(indexX - 1 + indexY * numOfNeuronsX);
                            if (upNeuron.isAboveThreshold()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (downNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (leftNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            if (tmpNeuron.getNumFiringNeighbors() == 3) {
                                if (leftNeuron.getGroupTag() == downNeuron.getGroupTag()) {
                                    tmpNeuron.setGroupTag(downNeuron.getGroupTag());
                                } else {
                                    if (leftNeuron.getGroupTag() > 0 && downNeuron.getGroupTag() > 0) {
                                        tmpNeuron.setGroupTag(Math.min(downNeuron.getGroupTag(), leftNeuron.getGroupTag()));
                                        // do merge here
                                        int targetGroupTag = Math.max(downNeuron.getGroupTag(), leftNeuron.getGroupTag());
                                        neuronGroups.get(tmpNeuron.getGroupTag()).merge(neuronGroups.get(targetGroupTag));
                                        neuronGroups.remove(targetGroupTag);
                                    } else if (leftNeuron.getGroupTag() < 0 && downNeuron.getGroupTag() < 0) {
                                        tmpNeuron.setGroupTag(-1);
                                    } else {
                                        tmpNeuron.setGroupTag(Math.max(downNeuron.getGroupTag(), leftNeuron.getGroupTag()));
                                    }
                                }
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                upNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                downNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                leftNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                updateGroup(tmpNeuron, UPDATE_UP | UPDATE_DOWN | UPDATE_LEFT);
                            }
                            break;
                        case EDGE_X0: // bottom edge: neighbors are up, right and left
                            upNeuron = lifNeurons.get( indexX + (indexY + 1) * numOfNeuronsX);
                            rightNeuron = lifNeurons.get(indexX + 1 + indexY * numOfNeuronsX);
                            leftNeuron = lifNeurons.get(indexX - 1 + indexY * numOfNeuronsX);
                            if (upNeuron.isAboveThreshold()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (rightNeuron.isAboveThreshold()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (leftNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            if (tmpNeuron.getNumFiringNeighbors() == 3) {
                                tmpNeuron.setGroupTag(-1);
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                upNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                rightNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                leftNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                updateGroup(tmpNeuron, UPDATE_UP | UPDATE_RIGHT | UPDATE_LEFT);
                            }
                            break;
                        case EDGE_X1: // top edge: neighbors are down, right and left
                            downNeuron = lifNeurons.get(indexX + (indexY - 1) * numOfNeuronsX);
                            rightNeuron = lifNeurons.get(indexX + 1 + indexY * numOfNeuronsX);
                            leftNeuron = lifNeurons.get(indexX - 1 + indexY * numOfNeuronsX);
                            if (downNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (rightNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (leftNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            if (tmpNeuron.getNumFiringNeighbors() == 3) {
                                // NOTE(review): the next statement is a plain `if`, not `else if`, so
                                // the second if/else below can overwrite the tag it assigns (compare
                                // EDGE_1Y, which uses a single if/else) — confirm this is intended.
                                if (rightNeuron.getGroupTag() == downNeuron.getGroupTag()) {
                                    tmpNeuron.setGroupTag(downNeuron.getGroupTag());
                                }
                                if (leftNeuron.getGroupTag() == downNeuron.getGroupTag()) {
                                    tmpNeuron.setGroupTag(downNeuron.getGroupTag());
                                } else {
                                    if (leftNeuron.getGroupTag() > 0 && downNeuron.getGroupTag() > 0) {
                                        tmpNeuron.setGroupTag(Math.min(downNeuron.getGroupTag(), leftNeuron.getGroupTag()));
                                        // do merge here
                                        int targetGroupTag = Math.max(downNeuron.getGroupTag(), leftNeuron.getGroupTag());
                                        neuronGroups.get(tmpNeuron.getGroupTag()).merge(neuronGroups.get(targetGroupTag));
                                        neuronGroups.remove(targetGroupTag);
                                    } else if (leftNeuron.getGroupTag() < 0 && downNeuron.getGroupTag() < 0) {
                                        tmpNeuron.setGroupTag(-1);
                                    } else {
                                        tmpNeuron.setGroupTag(Math.max(downNeuron.getGroupTag(), leftNeuron.getGroupTag()));
                                    }
                                }
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                rightNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                downNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                leftNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                updateGroup(tmpNeuron, UPDATE_DOWN | UPDATE_RIGHT | UPDATE_LEFT);
                            }
                            break;
                        case INSIDE: // interior neuron: all four neighbors exist
                            upNeuron = lifNeurons.get( indexX + (indexY + 1) * numOfNeuronsX);
                            downNeuron = lifNeurons.get(indexX + (indexY - 1) * numOfNeuronsX);
                            rightNeuron = lifNeurons.get(indexX + 1 + indexY * numOfNeuronsX);
                            leftNeuron = lifNeurons.get(indexX - 1 + indexY * numOfNeuronsX);
                            if (upNeuron.isAboveThreshold()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (downNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (rightNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (leftNeuron.isFired()) {
                                tmpNeuron.increaseNumFiringNeighbors();
                            }
                            if (tmpNeuron.getNumFiringNeighbors() > 0 && tmpNeuron.isFired()) {
                                if (tmpNeuron.getFiringType() != FiringType.FIRING_ON_BORDER) {
                                    tmpNeuron.setFiringType(FiringType.FIRING_WITH_NEIGHBOR);
                                }
                            }
                            if (tmpNeuron.getNumFiringNeighbors() == 4 && tmpNeuron.isFired()) {
                                // NOTE(review): same un-chained if/else pattern as EDGE_X1 above —
                                // the else branch can overwrite the first assignment; confirm intended.
                                if (rightNeuron.getGroupTag() == downNeuron.getGroupTag()) {
                                    tmpNeuron.setGroupTag(downNeuron.getGroupTag());
                                }
                                if (leftNeuron.getGroupTag() == downNeuron.getGroupTag()) {
                                    tmpNeuron.setGroupTag(downNeuron.getGroupTag());
                                } else {
                                    if (leftNeuron.getGroupTag() > 0 && downNeuron.getGroupTag() > 0) {
                                        tmpNeuron.setGroupTag(Math.min(downNeuron.getGroupTag(), leftNeuron.getGroupTag()));
                                        // do merge here
                                        int targetGroupTag = Math.max(downNeuron.getGroupTag(), leftNeuron.getGroupTag());
                                        neuronGroups.get(tmpNeuron.getGroupTag()).merge(neuronGroups.get(targetGroupTag));
                                        neuronGroups.remove(targetGroupTag);
                                    } else if (leftNeuron.getGroupTag() < 0 && downNeuron.getGroupTag() < 0) {
                                        tmpNeuron.setGroupTag(-1);
                                    } else {
                                        tmpNeuron.setGroupTag(Math.max(downNeuron.getGroupTag(), leftNeuron.getGroupTag()));
                                    }
                                }
                                tmpNeuron.setFiringType(FiringType.FIRING_INSIDE);
                                upNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.FORCED);
                                downNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                rightNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                leftNeuron.setFiringTypeToBorder(tmpNeuron.getGroupTag(), FiringTypeUpdate.CHECK);
                                updateGroup(tmpNeuron, UPDATE_UP | UPDATE_DOWN | UPDATE_RIGHT | UPDATE_LEFT);
                            }
                            break;
                        default:
                            break;
                    } // End of switch
                    // collect every neuron that ended up inside or on the border of a group
                    if (tmpNeuron.getFiringType() == FiringType.FIRING_INSIDE || tmpNeuron.getFiringType() == FiringType.FIRING_ON_BORDER) {
                        firingNeurons.add(tmpNeuron.cellNumber);
                    }
                } catch (java.util.ConcurrentModificationException e) {
                    initFilter();
                    log.warning(e.getMessage());
                }
            } // End of while
        } // End of if
    }
/**
* updates a neuron group with a new member
*
* @param newMemberNeuron : new member neuron
* @param updateOption : option for updating neighbor neurons. Selected neighbors are updated together.
* All neighbor neurons are updated together with option 'UPDATE_UP | UPDATE_DOWN | UPDATE_RIGHT | UPDATE_LEFT'.
*/
private final void updateGroup(LIFNeuron newMemberNeuron, int updateOption) {
NeuronGroup tmpGroup = null;
if (neuronGroups.containsKey(newMemberNeuron.getGroupTag())) {
tmpGroup = neuronGroups.get(newMemberNeuron.getGroupTag());
tmpGroup.add(newMemberNeuron);
} else {
tmpGroup = new NeuronGroup(newMemberNeuron);
neuronGroups.put(tmpGroup.tag, tmpGroup);
}
int indexX = (int) newMemberNeuron.getIndex().x;
int indexY = (int) newMemberNeuron.getIndex().y;
int up = indexX + (indexY + 1) * numOfNeuronsX;
int down = indexX + (indexY - 1) * numOfNeuronsX;
int right = indexX + 1 + indexY * numOfNeuronsX;
int left = indexX - 1 + indexY * numOfNeuronsX;
if ((updateOption & UPDATE_UP) > 0) {
tmpGroup.add(lifNeurons.get(up));
}
if ((updateOption & UPDATE_DOWN) > 0) {
tmpGroup.add(lifNeurons.get(down));
}
if ((updateOption & UPDATE_RIGHT) > 0) {
tmpGroup.add(lifNeurons.get(right));
}
if ((updateOption & UPDATE_LEFT) > 0) {
tmpGroup.add(lifNeurons.get(left));
}
}
public void annotate(GLAutoDrawable drawable) {
if (!isFilterEnabled()) {
return;
}
GL gl = drawable.getGL(); // when we get this we are already set up with scale 1=1 pixel, at LL corner
if (gl == null) {
log.warning("null GL in BlurringFilter2D.annotate");
return;
}
gl.glPushMatrix();
try {
if (showFiringNeurons) {
LIFNeuron tmpNeuron;
for (int i = 0; i < lifNeurons.size(); i++) {
tmpNeuron = lifNeurons.get(i);
if (showBorderNeuronsOnly && tmpNeuron.getFiringType() == FiringType.FIRING_ON_BORDER)
tmpNeuron.drawReceptiveField(drawable);
if (showInsideNeuronsOnly && tmpNeuron.getFiringType() == FiringType.FIRING_INSIDE)
tmpNeuron.drawReceptiveField(drawable);
if (!showBorderNeuronsOnly && !showInsideNeuronsOnly)
if(tmpNeuron.getFiringType() != FiringType.SILENT)
tmpNeuron.drawReceptiveField(drawable);
}
}
} catch (java.util.ConcurrentModificationException e) {
log.warning(e.getMessage());
}
gl.glPopMatrix();
}
@Override
synchronized public void initFilter() {
int prev_numOfNeuronsX = numOfNeuronsX;
int prev_numOfNeuronsY = numOfNeuronsY;
// calculate the required number of neurons
if (2 * mychip.getSizeX() % receptiveFieldSizePixels == 0) {
numOfNeuronsX = (int) (2 * mychip.getSizeX() / receptiveFieldSizePixels) - 1;
} else {
numOfNeuronsX = (int) (2 * mychip.getSizeX() / receptiveFieldSizePixels);
}
if (2 * mychip.getSizeY() % receptiveFieldSizePixels == 0) {
numOfNeuronsY = (int) (2 * mychip.getSizeY() / receptiveFieldSizePixels) - 1;
} else {
numOfNeuronsY = (int) (2 * mychip.getSizeY() / receptiveFieldSizePixels);
}
lastTime = 0;
firingNeurons.clear();
neuronGroups.clear();
numOfGroup = 0;
// initialize all neurons
if ((numOfNeuronsX > 0 && numOfNeuronsY > 0) &&
(prev_numOfNeuronsX != numOfNeuronsX || prev_numOfNeuronsY != numOfNeuronsY)) {
if (!lifNeurons.isEmpty()) {
lifNeurons.clear();
}
for (int j = 0; j < numOfNeuronsY; j++) {
for (int i = 0; i < numOfNeuronsX; i++) {
LIFNeuron newNeuron = new LIFNeuron(i, j);
if (i == 0) {
if (j == 0) {
newNeuron.setLocationType(LocationType.CORNER_00);
} else if (j == numOfNeuronsY - 1) {
newNeuron.setLocationType(LocationType.CORNER_01);
} else {
newNeuron.setLocationType(LocationType.EDGE_0Y);
}
} else if (i == numOfNeuronsX - 1) {
if (j == 0) {
newNeuron.setLocationType(LocationType.CORNER_10);
} else if (j == numOfNeuronsY - 1) {
newNeuron.setLocationType(LocationType.CORNER_11);
} else {
newNeuron.setLocationType(LocationType.EDGE_1Y);
}
} else {
if (j == 0) {
newNeuron.setLocationType(LocationType.EDGE_X0);
} else if (j == numOfNeuronsY - 1) {
newNeuron.setLocationType(LocationType.EDGE_X1);
} else {
newNeuron.setLocationType(LocationType.INSIDE);
}
}
lifNeurons.add(newNeuron.getCellNumber(), newNeuron);
}
}
}
}
@Override
public void resetFilter() {
for (LIFNeuron n : lifNeurons) {
n.reset();
}
lastTime = 0;
firingNeurons.clear();
neuronGroups.clear();
numOfGroup = 0;
}
/**
* returns the time constant of the neuron's membranePotential
*
* @return
*/
public int getMPTimeConstantUs() {
return MPTimeConstantUs;
}
/**
* sets MPTimeConstantUs
*
* @param
*/
public void setMPTimeConstantUs(int MPTimeConstantUs) {
this.MPTimeConstantUs = MPTimeConstantUs;
getPrefs().putInt("BlurringFilter2D.MPTimeConstantUs", MPTimeConstantUs);
}
/**
* returns neuronLifeTimeUs
*
* @return
*/
public int getNeuronLifeTimeUs() {
return neuronLifeTimeUs;
}
/**
* sets neuronLifeTimeUs
*
* @param neuronLifeTimeUs
*/
public void setNeuronLifeTimeUs(int neuronLifeTimeUs) {
this.neuronLifeTimeUs = neuronLifeTimeUs;
getPrefs().putInt("BlurringFilter2D.neuronLifeTimeUs", neuronLifeTimeUs);
}
/**
 * Returns receptiveFieldSizePixels.
 *
 * @return receptiveFieldSizePixels the receptive-field size in pixels
 */
public int getReceptiveFieldSizePixels() {
return receptiveFieldSizePixels;
}
/**
 * Sets receptiveFieldSizePixels, persists it, and reinitializes the filter
 * (the neuron grid depends on the receptive-field size, so it must be rebuilt).
 *
 * @param receptiveFieldSizePixels new receptive-field size in pixels
 */
synchronized public void setReceptiveFieldSizePixels(int receptiveFieldSizePixels) {
this.receptiveFieldSizePixels = receptiveFieldSizePixels;
getPrefs().putInt("BlurringFilter2D.receptiveFieldSizePixels", receptiveFieldSizePixels);
initFilter();
}
/**
 * Returns MPThreshold.
 *
 * @return MPThreshold the membrane-potential firing threshold
 */
public int getMPThreshold() {
return MPThreshold;
}
/**
 * Sets MPThreshold and persists it to the preferences store.
 *
 * @param MPThreshold new firing threshold
 */
public void setMPThreshold(int MPThreshold) {
this.MPThreshold = MPThreshold;
getPrefs().putInt("BlurringFilter2D.MPThreshold", MPThreshold);
}
/**
 * Returns showFiringNeurons.
 *
 * @return true if firing neurons are drawn in the display
 */
public boolean isShowFiringNeurons() {
return showFiringNeurons;
}
/**
 * Sets showFiringNeurons and persists it to the preferences store.
 *
 * @param showFiringNeurons whether to draw firing neurons
 */
public void setShowFiringNeurons(boolean showFiringNeurons) {
this.showFiringNeurons = showFiringNeurons;
getPrefs().putBoolean("BlurringFilter2D.showFiringNeurons", showFiringNeurons);
}
/**
 * Returns showBorderNeuronsOnly.
 *
 * @return true if only border neurons are drawn
 */
public boolean isShowBorderNeuronsOnly() {
return showBorderNeuronsOnly;
}
/**
 * Sets showBorderNeuronsOnly and persists it to the preferences store.
 *
 * @param showBorderNeuronsOnly whether to restrict drawing to border neurons
 */
public void setShowBorderNeuronsOnly(boolean showBorderNeuronsOnly) {
this.showBorderNeuronsOnly = showBorderNeuronsOnly;
getPrefs().putBoolean("BlurringFilter2D.showBorderNeuronsOnly", showBorderNeuronsOnly);
}
/**
 * Returns showInsideNeuronsOnly.
 *
 * @return true if only inside (non-border) neurons are drawn
 */
public boolean isShowInsideNeuronsOnly() {
return showInsideNeuronsOnly;
}
/**
 * Sets showInsideNeuronsOnly and persists it to the preferences store.
 *
 * @param showInsideNeuronsOnly whether to restrict drawing to inside neurons
 */
public void setShowInsideNeuronsOnly(boolean showInsideNeuronsOnly) {
this.showInsideNeuronsOnly = showInsideNeuronsOnly;
getPrefs().putBoolean("BlurringFilter2D.showInsideNeuronsOnly", showInsideNeuronsOnly);
}
/**
 * Returns filledReceptiveField.
 *
 * @return true if receptive fields are drawn filled rather than outlined
 */
public boolean isFilledReceptiveField() {
return filledReceptiveField;
}
/**
 * Sets filledReceptiveField and persists it to the preferences store.
 *
 * @param filledReceptiveField whether to draw receptive fields filled
 */
public void setFilledReceptiveField(boolean filledReceptiveField) {
this.filledReceptiveField = filledReceptiveField;
getPrefs().putBoolean("BlurringFilter2D.filledReceptiveField", filledReceptiveField);
}
/**
 * Returns the number of neuron groups currently tracked.
 *
 * @return numOfGroup
 */
public int getNumOfGroup() {
return numOfGroup;
}
/**
 * Returns the collection of neuron groups.
 * NOTE(review): raw {@code Collection} return type - callers must cast; consider
 * parameterizing once the element type of {@code neuronGroups} is confirmed.
 *
 * @return the values of the neuronGroups map
 */
public Collection getNeuronGroups() {
return neuronGroups.values();
}
/**
 * Returns the last timestamp ever recorded at this filter.
 *
 * @return the last timestamp ever recorded at this filter
 */
public int getLastTime() {
return lastTime;
}
/**
 * Returns MPJumpAfterFiring.
 *
 * @return the amount the membrane potential jumps after a neuron fires
 */
public float getMPJumpAfterFiring() {
return MPJumpAfterFiring;
}
/**
 * Sets MPJumpAfterFiring and persists it to the preferences store.
 *
 * @param MPJumpAfterFiring new post-firing membrane-potential jump
 */
public void setMPJumpAfterFiring(float MPJumpAfterFiring) {
this.MPJumpAfterFiring = MPJumpAfterFiring;
getPrefs().putFloat("BlurringFilter2D.MPJumpAfterFiring", MPJumpAfterFiring);
}
/**
 * Returns colorToDrawRF.
 *
 * @return the color choice used to draw receptive fields
 */
public COLOR_CHOICE getColorToDrawRF() {
return colorToDrawRF;
}
/**
 * Sets the color used to draw receptive fields, persists the choice to the
 * preferences store, and caches the color's RGB components for rendering.
 *
 * @param colorToDrawRF the new receptive-field color choice
 */
public void setColorToDrawRF(COLOR_CHOICE colorToDrawRF) {
// Fix: removed unused local `COLOR_CHOICE old` (assigned but never read;
// likely a leftover from a firePropertyChange pattern that was never wired up).
this.colorToDrawRF = colorToDrawRF;
getPrefs().put("BlurringFilter2D.colorToDrawRF",colorToDrawRF.toString());
// Cache the components of the chosen color into `rgb` for use when drawing.
colors.get(colorToDrawRF).getRGBComponents(rgb);
}
}
|
package com.jaquadro.minecraft.storagedrawers.block.tile;
import com.jaquadro.minecraft.storagedrawers.StorageDrawers;
import com.jaquadro.minecraft.storagedrawers.api.inventory.IDrawerInventory;
import com.jaquadro.minecraft.storagedrawers.api.storage.IDrawer;
import com.jaquadro.minecraft.storagedrawers.api.storage.IDrawerGroup;
import com.jaquadro.minecraft.storagedrawers.inventory.ISideManager;
import com.jaquadro.minecraft.storagedrawers.inventory.StorageInventory;
import com.jaquadro.minecraft.storagedrawers.network.CountUpdateMessage;
import cpw.mods.fml.common.FMLLog;
import cpw.mods.fml.common.network.NetworkRegistry;
import cpw.mods.fml.common.network.simpleimpl.IMessage;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.Packet;
import net.minecraft.network.play.server.S35PacketUpdateTileEntity;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.MathHelper;
import net.minecraftforge.common.util.Constants;
import net.minecraftforge.common.util.ForgeDirection;
import org.apache.logging.log4j.Level;
import java.util.Iterator;
import java.util.UUID;
/**
 * Base tile entity for all drawer blocks. Owns the per-slot {@link IDrawer}s,
 * persists them to NBT, and exposes the whole group as an {@link ISidedInventory}
 * via a backing {@link StorageInventory}.
 */
public abstract class TileEntityDrawers extends TileEntity implements IDrawerGroup, ISidedInventory
{
private IDrawer[] drawers;
private IDrawerInventory inventory;
// Sides exposed to automation; rebuilt in setDirection() so the facing side is excluded.
private int[] autoSides = new int[] { 0, 1 };
private int direction;
private int drawerCapacity = 1;
private int storageLevel = 1;
private int statusLevel = 0;
// State for double-click detection in interactPutItemsIntoSlot().
private long lastClickTime;
private UUID lastClickUUID;
// Snapshot of the NBT tag from a failed load; replayed on the next write so data is not lost.
private NBTTagCompound failureSnapshot;
protected TileEntityDrawers (int drawerCount) {
initWithDrawerCount(drawerCount);
}
/** Creates the drawer implementation for the given slot index. */
protected abstract IDrawer createDrawer (int slot);
protected ISideManager getSideManager () {
return new DefaultSideManager();
}
// (Re)creates the drawer array and the backing inventory for the given drawer count.
protected void initWithDrawerCount (int drawerCount) {
drawers = new IDrawer[drawerCount];
for (int i = 0; i < drawerCount; i++)
drawers[i] = createDrawer(i);
inventory = new StorageInventory(this, getSideManager());
}
public int getDirection () {
return direction;
}
/**
 * Sets the facing direction and rebuilds the automation-accessible sides to be
 * every side except the front face.
 * NOTE(review): the field is set from {@code direction % 6} but the unreduced value
 * indexes {@code ForgeDirection.OPPOSITES} - confirm callers never pass values >= 6.
 */
public void setDirection (int direction) {
this.direction = direction % 6;
autoSides = new int[] { 0, 1, ForgeDirection.OPPOSITES[direction], 2, 3 };
if (direction == 2 || direction == 3) {
autoSides[3] = 4;
autoSides[4] = 5;
}
}
public int getStorageLevel () {
return storageLevel;
}
/** Sets the storage upgrade level, clamped to [1, 6]. */
public void setStorageLevel (int level) {
this.storageLevel = MathHelper.clamp_int(level, 1, 6);
}
public int getStatusLevel () {
return statusLevel;
}
// NOTE(review): clamps to [1, 3] although the field defaults to 0 and readFromNBT
// restores 0 - this setter cannot reset the status upgrade; confirm that is intended.
public void setStatusLevel (int level) {
this.statusLevel = MathHelper.clamp_int(level, 1, 3);
}
public int getDrawerCapacity () {
return drawerCapacity;
}
public void setDrawerCapacity (int stackCount) {
drawerCapacity = stackCount;
}
/**
 * Removes up to {@code count} items from the drawer in {@code slot}.
 *
 * @return the removed stack, or null if the slot is invalid or empty
 */
public ItemStack takeItemsFromSlot (int slot, int count) {
if (slot < 0 || slot >= getDrawerCount())
return null;
ItemStack stack = getItemsFromSlot(slot, count);
if (stack == null)
return null;
IDrawer drawer = drawers[slot];
drawer.setStoredItemCount(drawer.getStoredItemCount() - stack.stackSize);
// TODO: Reset empty drawer in subclasses
return stack;
}
// Builds a copy of the stored item, capped by max stack size, the requested count,
// and the number of items actually stored. Does not mutate the drawer.
protected ItemStack getItemsFromSlot (int slot, int count) {
if (drawers[slot].isEmpty())
return null;
ItemStack stack = drawers[slot].getStoredItemCopy();
stack.stackSize = Math.min(stack.getMaxStackSize(), count);
stack.stackSize = Math.min(stack.stackSize, drawers[slot].getStoredItemCount());
return stack;
}
/**
 * Inserts up to {@code count} items from {@code stack} into the drawer in {@code slot},
 * decrementing {@code stack.stackSize} by the amount stored.
 *
 * @return the number of items actually added
 */
public int putItemsIntoSlot (int slot, ItemStack stack, int count) {
if (slot < 0 || slot >= getDrawerCount())
return 0;
IDrawer drawer = drawers[slot];
if (drawer.isEmpty())
drawer.setStoredItem(stack, 0);
if (!drawer.canItemBeStored(stack))
return 0;
int countAdded = Math.min(drawer.getRemainingCapacity(), stack.stackSize);
countAdded = Math.min(countAdded, count);
drawer.setStoredItemCount(drawer.getStoredItemCount() + countAdded);
stack.stackSize -= countAdded;
return countAdded;
}
/**
 * Handles a player right-click insert: stores the held stack, and on a quick
 * double-click (same player within 10 ticks) also sweeps matching items from
 * the rest of the player's inventory.
 *
 * @return the total number of items stored
 */
public int interactPutItemsIntoSlot (int slot, EntityPlayer player) {
int count = 0;
ItemStack currentStack = player.inventory.getCurrentItem();
if (currentStack != null)
count += putItemsIntoSlot(slot, currentStack, currentStack.stackSize);
if (!drawers[slot].isEmpty() && worldObj.getTotalWorldTime() - lastClickTime < 10 && player.getPersistentID().equals(lastClickUUID)) {
for (int i = 0, n = player.inventory.getSizeInventory(); i < n; i++) {
ItemStack subStack = player.inventory.getStackInSlot(i);
if (subStack != null) {
int subCount = putItemsIntoSlot(slot, subStack, subStack.stackSize);
if (subCount > 0 && subStack.stackSize == 0)
player.inventory.setInventorySlotContents(i, null);
count += subCount;
}
}
}
lastClickTime = worldObj.getTotalWorldTime();
lastClickUUID = player.getPersistentID();
markDirty();
return count;
}
// Records a failed NBT load so the original data can be written back out unchanged.
protected void trapLoadFailure (Throwable t, NBTTagCompound tag) {
failureSnapshot = (NBTTagCompound)tag.copy();
FMLLog.log(StorageDrawers.MOD_ID, Level.ERROR, t, "Tile Load Failure.");
}
// Copies any keys from the failure snapshot that the outgoing tag does not already have.
protected void restoreLoadFailure (NBTTagCompound tag) {
Iterator<String> iter = failureSnapshot.func_150296_c().iterator();
while (iter.hasNext()) {
String key = iter.next();
if (!tag.hasKey(key))
tag.setTag(key, failureSnapshot.getTag(key));
}
}
protected boolean loadDidFail () {
return failureSnapshot != null;
}
@Override
public void readFromNBT (NBTTagCompound tag) {
super.readFromNBT(tag);
failureSnapshot = null;
// Any throwable during load is trapped so the tag can be preserved on the next write.
try {
setDirection(tag.getByte("Dir"));
drawerCapacity = tag.getByte("Cap");
storageLevel = tag.getByte("Lev");
statusLevel = 0;
if (tag.hasKey("Stat"))
statusLevel = tag.getByte("Stat");
NBTTagList slots = tag.getTagList("Slots", Constants.NBT.TAG_COMPOUND);
int drawerCount = slots.tagCount();
drawers = new IDrawer[slots.tagCount()];
for (int i = 0, n = drawers.length; i < n; i++) {
NBTTagCompound slot = slots.getCompoundTagAt(i);
drawers[i] = createDrawer(i);
drawers[i].readFromNBT(slot);
}
inventory = new StorageInventory(this, getSideManager());
}
catch (Throwable t) {
trapLoadFailure(t, tag);
}
}
@Override
public void writeToNBT (NBTTagCompound tag) {
super.writeToNBT(tag);
// If the last load failed, write back the preserved data instead of our (bad) state.
if (failureSnapshot != null) {
restoreLoadFailure(tag);
return;
}
tag.setByte("Dir", (byte)direction);
tag.setByte("Cap", (byte)drawerCapacity);
tag.setByte("Lev", (byte) storageLevel);
if (statusLevel > 0)
tag.setByte("Stat", (byte)statusLevel);
NBTTagList slots = new NBTTagList();
for (IDrawer drawer : drawers) {
NBTTagCompound slot = new NBTTagCompound();
drawer.writeToNBT(slot);
slots.appendTag(slot);
}
tag.setTag("Slots", slots);
}
@Override
public boolean canUpdate () {
// This tile entity does not tick.
return false;
}
@Override
public void markDirty () {
inventory.markDirty();
super.markDirty();
}
// Client-side: applies a count update received over the network and requests a re-render.
public void clientUpdateCount (int slot, int count) {
IDrawer drawer = getDrawer(slot);
if (drawer.getStoredItemCount() != count) {
drawer.setStoredItemCount(count);
getWorldObj().func_147479_m(xCoord, yCoord, zCoord); // markBlockForRenderUpdate
}
}
// NOTE(review): private and not called from this class - presumably invoked by a
// subclass hook or intended for future use; confirm before removing.
private void syncClientCount (int slot) {
IMessage message = new CountUpdateMessage(xCoord, yCoord, zCoord, slot, drawers[slot].getStoredItemCount());
NetworkRegistry.TargetPoint targetPoint = new NetworkRegistry.TargetPoint(worldObj.provider.dimensionId, xCoord, yCoord, zCoord, 500);
StorageDrawers.network.sendToAllAround(message, targetPoint);
}
// TODO: Eventually eliminate these expensive network updates
@Override
public Packet getDescriptionPacket () {
NBTTagCompound tag = new NBTTagCompound();
writeToNBT(tag);
return new S35PacketUpdateTileEntity(xCoord, yCoord, zCoord, 5, tag);
}
@Override
public void onDataPacket (NetworkManager net, S35PacketUpdateTileEntity pkt) {
readFromNBT(pkt.func_148857_g());
getWorldObj().func_147479_m(xCoord, yCoord, zCoord); // markBlockForRenderUpdate
}
@Override
public int getDrawerCount () {
return drawers.length;
}
@Override
public IDrawer getDrawer (int slot) {
if (slot < 0 || slot >= drawers.length)
return null;
return drawers[slot];
}
@Override
public boolean isDrawerEnabled (int slot) {
return getDrawer(slot) != null;
}
// --- ISidedInventory / IInventory: all calls delegate to the backing StorageInventory. ---
@Override
public int[] getAccessibleSlotsFromSide (int side) {
return inventory.getAccessibleSlotsFromSide(side);
}
@Override
public boolean canInsertItem (int slot, ItemStack stack, int side) {
return inventory.canInsertItem(slot, stack, side);
}
@Override
public boolean canExtractItem (int slot, ItemStack stack, int side) {
return inventory.canExtractItem(slot, stack, side);
}
@Override
public int getSizeInventory () {
return inventory.getSizeInventory();
}
@Override
public ItemStack getStackInSlot (int slot) {
return inventory.getStackInSlot(slot);
}
@Override
public ItemStack decrStackSize (int slot, int count) {
return inventory.decrStackSize(slot, count);
}
@Override
public ItemStack getStackInSlotOnClosing (int slot) {
return inventory.getStackInSlotOnClosing(slot);
}
@Override
public void setInventorySlotContents (int slot, ItemStack stack) {
inventory.setInventorySlotContents(slot, stack);
}
@Override
public String getInventoryName () {
return inventory.getInventoryName();
}
@Override
public boolean hasCustomInventoryName () {
return inventory.hasCustomInventoryName();
}
@Override
public int getInventoryStackLimit () {
return inventory.getInventoryStackLimit();
}
@Override
public boolean isUseableByPlayer (EntityPlayer player) {
return inventory.isUseableByPlayer(player);
}
@Override
public void openInventory () {
inventory.openInventory();
}
@Override
public void closeInventory () {
inventory.closeInventory();
}
@Override
public boolean isItemValidForSlot (int slot, ItemStack stack) {
return inventory.isItemValidForSlot(slot, stack);
}
// Default side policy: expose the autoSides computed from the current facing.
private class DefaultSideManager implements ISideManager
{
@Override
public int[] getSlotsForSide (int side) {
return autoSides;
}
}
}
|
package com.opera.core.systems.runner.launcher;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closeables;
import com.google.common.io.Files;
import com.opera.core.systems.OperaPaths;
import com.opera.core.systems.OperaProduct;
import com.opera.core.systems.arguments.OperaCoreArguments;
import com.opera.core.systems.arguments.OperaDesktopArguments;
import com.opera.core.systems.arguments.interfaces.OperaArguments;
import com.opera.core.systems.runner.OperaLaunchers;
import com.opera.core.systems.runner.OperaRunnerException;
import com.opera.core.systems.runner.OperaRunnerSettings;
import org.openqa.selenium.Platform;
import org.openqa.selenium.WebDriverException;
import org.openqa.selenium.os.CommandLine;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * Runner settings for the Opera launcher binary. Resolves the launcher on the
 * local system, extracting and upgrading the copy bundled with OperaDriver when
 * necessary, and makes it executable on POSIX platforms.
 */
public class OperaLauncherRunnerSettings extends OperaRunnerSettings {

public static final Logger logger = Logger.getLogger(OperaLauncherRunnerSettings.class.getName());

protected File launcher;

public OperaLauncherRunnerSettings() {
super();
}

/** Sets the logging level, translated into the launcher's own level scale. */
public void setLoggingLevel(Level level) {
loggingLevel = OperaLauncherRunner.toLauncherLoggingLevel(level);
}

/**
 * Lazily resolves the launcher binary, extracting the bundled one if no valid
 * launcher has been configured.
 *
 * @return the launcher executable
 */
public File getLauncher() {
if (launcher == null) {
launcher = new File(launcherPath());
}
return launcher;
}

/**
 * Sets the launcher binary to use.
 *
 * @param launcher an existing, executable, regular file
 * @throws OperaRunnerException if the given file is not an executable regular file
 */
public void setLauncher(File launcher) {
if (launcher.exists() && launcher.isFile() && launcher.canExecute()) {
this.launcher = launcher;
} else {
throw new OperaRunnerException("Invalid launcher: " + launcher);
}
}

/** Creates settings preconfigured with the argument flavour for the configured product. */
public static OperaLauncherRunnerSettings getDefaultSettings() {
OperaLauncherRunnerSettings settings = new OperaLauncherRunnerSettings();
// Desktop builds use a different command-line argument syntax than core builds.
OperaArguments arguments;
if (settings.getProduct().is(OperaProduct.DESKTOP)) {
arguments = new OperaDesktopArguments();
} else {
arguments = new OperaCoreArguments();
}
settings.setArguments(arguments);
return settings;
}

/**
 * This method will try to locate the launcher on any system. If the OPERA_LAUNCHER environment
 * variable is set but invalid, it will throw an exception. If that is not the case, it will
 * attempt to extract the launcher from the resources of the launcher JAR that is bundled with
 * OperaDriver.
 *
 * @return the path to the launcher
 * @throws org.openqa.selenium.WebDriverException if launcher is not found
 */
private static String launcherPath() {
String path = System.getenv("OPERA_LAUNCHER");

if (!OperaPaths.isPathValid(path)) {
// A set-but-invalid OPERA_LAUNCHER is a configuration error and must not be ignored.
if (path != null && !path.isEmpty()) {
throw new OperaRunnerException("Path from OPERA_LAUNCHER does not exist: " + path);
}
try {
String userHome = System.getProperty("user.home");
path = extractLauncher(new File(userHome + File.separator + ".launcher"));
} catch (OperaRunnerException e) {
throw new WebDriverException("Unable to extract bundled launcher: " + e.getMessage());
}
}

return path;
}

/**
 * Extracts the launcher from the launcher JAR bundled with OperaDriver into the directory
 * specified. If the launcher in that location is outdated, it will be updated/replaced.
 *
 * @param launcherPath directory where you wish to put the launcher
 * @return path to the launcher executable
 * @throws OperaRunnerException if no launcher is bundled for this platform, if hashing fails,
 *         or if the extracted file's metadata cannot be updated
 */
private static String extractLauncher(File launcherPath) {
String launcherName = getLauncherNameForOS();
File targetLauncher =
new File(launcherPath.getAbsolutePath() + File.separatorChar + launcherName);

// Whether we need to copy a new launcher across, either because it doesn't currently exist,
// or because its hash differs from our bundled launcher.
boolean copy;

// Get the launcher resource from the JAR.
URL sourceLauncher =
OperaLaunchers.class.getClassLoader().getResource("launchers/" + launcherName);

// Does launcher exist among our resources?
if (sourceLauncher == null) {
// Fix: sourceLauncher is provably null here, so the old message printed "Unknown
// file: null"; name the resource we actually looked for instead.
throw new OperaRunnerException("Unknown file: launchers/" + launcherName);
}

// Copy the launcher if it doesn't exist or if the current launcher on the system doesn't
// match the one bundled with OperaDriver (launcher needs to be upgraded).
if (targetLauncher.exists()) {
try {
copy = !Arrays.equals(md5(targetLauncher), md5(sourceLauncher.openStream()));
if (copy) {
logger.fine("Old launcher detected, upgrading");
}
} catch (NoSuchAlgorithmException e) {
throw new OperaRunnerException("Algorithm is not available in your environment: " + e);
} catch (IOException e) {
throw new OperaRunnerException("Unable to open stream or file: " + e);
}
} else {
logger.fine("No launcher found, copying");
copy = true;
}

if (copy) {
InputStream is = null;
OutputStream os = null;

try {
if (!targetLauncher.exists()) {
launcherPath.mkdirs();
Files.touch(targetLauncher);
}

is = sourceLauncher.openStream();
os = new FileOutputStream(targetLauncher);
ByteStreams.copy(is, os);

// NOTE(review): this sets the modification time to its own current value; it only
// verifies the file metadata is writable. Confirm whether the bundled launcher's
// timestamp was intended here.
if (!targetLauncher.setLastModified(targetLauncher.lastModified())) {
throw new OperaRunnerException(
"Unable to set modification time for file: " + targetLauncher);
}
} catch (IOException e) {
throw new WebDriverException("Cannot write file to disk: " + e.getMessage());
} finally {
// Closeables.closeQuietly is documented null-safe; the previous null guards
// were redundant.
Closeables.closeQuietly(is);
Closeables.closeQuietly(os);
}

logger.fine("New launcher copied to " + targetLauncher.getAbsolutePath());

// Merged from a duplicate `if (copy)` block that immediately followed this one:
// a freshly copied launcher always needs its executable bit set.
makeLauncherExecutable(targetLauncher);
}

return targetLauncher.getAbsolutePath();
}

/** Makes the currently resolved launcher executable. */
public void makeLauncherExecutable() {
makeLauncherExecutable(getLauncher());
}

/**
 * Makes the launcher executable by chmod'ing the file at given path (GNU/Linux and Mac only).
 *
 * @param launcher the file to make executable
 */
private static void makeLauncherExecutable(File launcher) {
Platform current = Platform.getCurrent();

if (current.is(Platform.UNIX) || current.is(Platform.MAC)) {
CommandLine line = new CommandLine("chmod", "u+x", launcher.getAbsolutePath());
line.execute();
}
}

/**
 * Get the launcher's binary file name based on what flavour of operating system and what kind of
 * architecture the user is using.
 *
 * @return the launcher's binary file name
 * @throws WebDriverException if the current platform has no bundled launcher
 */
protected static String getLauncherNameForOS() {
boolean is64 = "64".equals(System.getProperty("sun.arch.data.model"));
Platform currentPlatform = Platform.getCurrent();

switch (currentPlatform) {
case LINUX:
case UNIX:
return (is64 ? "launcher-linux-x86_64" : "launcher-linux-i686");
case MAC:
return "launcher-mac";
case WINDOWS:
case VISTA:
case XP:
return "launcher-win32-i86pc.exe";
default:
throw new WebDriverException(
"Could not find a platform that supports bundled launchers, please set it manually");
}
}

/**
 * Check that the given path is not null, and exists.
 * NOTE(review): unused within this class (OperaPaths.isPathValid is used instead);
 * kept for compatibility - candidate for removal.
 *
 * @param path the path to check
 * @return true if the path is valid, false otherwise
 */
private boolean isPathValid(String path) {
if (path == null) {
return false;
}
if (path.length() == 0) {
return false;
}
File file = new File(path);
return (file.exists());
}

/**
 * Get the MD5 hash of the given stream.
 *
 * @param fis the input stream to use
 * @return a byte array of the MD5 hash
 * @throws NoSuchAlgorithmException if MD5 is not available
 * @throws IOException if an I/O error occurs
 */
private static byte[] md5(InputStream fis) throws NoSuchAlgorithmException, IOException {
return ByteStreams.getDigest(ByteStreams.newInputStreamSupplier(ByteStreams.toByteArray(fis)),
MessageDigest.getInstance("MD5"));
}

/**
 * Get the MD5 hash of the given file.
 *
 * @param file file to compute a hash on
 * @return a byte array of the MD5 hash
 * @throws IOException if file cannot be found
 * @throws NoSuchAlgorithmException if MD5 is not available
 */
private static byte[] md5(File file) throws NoSuchAlgorithmException, IOException {
return Files.getDigest(file, MessageDigest.getInstance("MD5"));
}
}
|
package compiler.language.parser.rules.statement;
import static compiler.language.parser.ParseType.BLOCK;
import static compiler.language.parser.ParseType.EXPRESSION;
import static compiler.language.parser.ParseType.SYNCHRONIZED_KEYWORD;
import static compiler.language.parser.ParseType.SYNCHRONIZED_STATEMENT;
import compiler.language.ast.ParseInfo;
import compiler.language.ast.expression.Expression;
import compiler.language.ast.statement.Block;
import compiler.language.ast.statement.SynchronizedStatement;
import compiler.parser.ParseException;
import compiler.parser.Rule;
/**
* @author Anthony Bryant
*/
/**
 * Parser rule for {@code synchronized} statements. Accepts either
 * {@code synchronized <expression> <block>} or the monitor-less
 * {@code synchronized <block>} form.
 *
 * @author Anthony Bryant
 */
public class SynchronizedStatementRule extends Rule
{
private static final Object[] PRODUCTION = new Object[] {SYNCHRONIZED_KEYWORD, EXPRESSION, BLOCK};
private static final Object[] NO_EXPRESSION_PRODUCTION = new Object[] {SYNCHRONIZED_KEYWORD, BLOCK};

public SynchronizedStatementRule()
{
super(SYNCHRONIZED_STATEMENT, PRODUCTION, NO_EXPRESSION_PRODUCTION);
}

/**
 * {@inheritDoc}
 * @see compiler.parser.Rule#match(java.lang.Object[], java.lang.Object[])
 */
@Override
public Object match(Object[] types, Object[] args) throws ParseException
{
// Monitor-less form: synchronized { ... }
if (types == NO_EXPRESSION_PRODUCTION)
{
Block body = (Block) args[1];
ParseInfo info = ParseInfo.combine((ParseInfo) args[0], body.getParseInfo());
return new SynchronizedStatement(null, body, info);
}
// Full form: synchronized expr { ... }
if (types == PRODUCTION)
{
Expression monitor = (Expression) args[1];
Block body = (Block) args[2];
ParseInfo info = ParseInfo.combine((ParseInfo) args[0], monitor.getParseInfo(), body.getParseInfo());
return new SynchronizedStatement(monitor, body, info);
}
throw badTypeList();
}
}
|
package com.facebook.litho.testing.viewtree;
import javax.annotation.Nullable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.regex.Pattern;
import android.annotation.TargetApi;
import android.graphics.Canvas;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.facebook.litho.ComponentHost;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import org.robolectric.Shadows;
import org.robolectric.shadows.ShadowCanvas;
/**
* A collection of useful predicates over Android views for tests
*/
final class ViewPredicates {
private ViewPredicates() {}
/**
 * Returns a predicate that returns true if the view's text satisfies the given
 * string predicate.
 *
 * @param predicate the predicate with which to test the text
 * @return the predicate
 */
public static Predicate<View> hasTextMatchingPredicate(final Predicate<String> predicate) {
return new Predicate<View>() {
@Override
public boolean apply(final View input) {
if (predicate.apply(extractString(input))) {
return true;
}
// A ComponentHost can render text without being a TextView; query its components too.
if (input instanceof ComponentHost) {
return ComponentQueries.hasTextMatchingPredicate((ComponentHost) input, predicate);
}
return false;
}
};
}
/**
 * Returns a predicate that returns true if the view's text is equal to the given text.
 *
 * @param text the text to check
 * @return the predicate
 */
public static Predicate<View> hasText(final String text) {
return hasTextMatchingPredicate(Predicates.equalTo(text));
}
/**
 * Returns a predicate that matches views carrying the given tag value under the
 * given tag id.
 *
 * @param tagId the id under which the tag is stored
 * @param tagValue the expected tag value (compared with equals)
 * @return the predicate
 */
public static Predicate<View> hasTag(final int tagId, final Object tagValue) {
return new Predicate<View>() {
@Override
public boolean apply(final View input) {
final Object tag = input.getTag(tagId);
return tag != null && tag.equals(tagValue);
}
};
}
/**
 * Returns a predicate that matches views with the given content description.
 * For {@link ComponentHost} views, all of the host's content descriptions are checked.
 *
 * @param contentDescription the content description to look for
 * @return the predicate
 */
public static Predicate<View> hasContentDescription(final String contentDescription) {
return new Predicate<View>() {
@Override
public boolean apply(final View input) {
if (input instanceof ComponentHost) {
final List<CharSequence> contentDescriptions =
((ComponentHost) input).getContentDescriptions();
return contentDescriptions.contains(contentDescription);
}
return contentDescription.equals(input.getContentDescription());
}
};
}
/** Returns a predicate matching visible views whose text equals the given text. */
public static Predicate<View> hasVisibleText(final String text) {
return Predicates.and(isVisible(), hasText(text));
}
/** Returns a predicate matching visible views with the given text AND the given tag. */
public static Predicate<View> hasVisibleTextWithTag(
final String text,
final int tagId,
final Object tagValue) {
return Predicates.and(hasVisibleText(text), hasTag(tagId, tagValue));
}
/**
 * Returns a predicate that matches views whose text contains a match for the
 * given regular expression. The pattern is compiled once, up front.
 *
 * @param text the regular expression to match against
 * @return the predicate
 */
public static Predicate<View> matchesText(final String text) {
final Pattern pattern = Pattern.compile(text);
return new Predicate<View>() {
@Override
public boolean apply(final View input) {
if (pattern.matcher(extractString(input)).find()) {
return true;
}
// ComponentHost text lives in its components rather than the view itself.
if (input instanceof ComponentHost) {
return ComponentQueries.matchesPattern((ComponentHost) input, pattern);
}
return false;
}
};
}
/** Returns a predicate matching visible views whose text matches the given regex. */
public static Predicate<View> hasVisibleMatchingText(final String text) {
return Predicates.and(isVisible(), matchesText(text));
}
/** Returns a predicate matching views whose visibility is {@code View.VISIBLE}. */
public static Predicate<View> isVisible() {
return new Predicate<View>() {
@Override
public boolean apply(final View input) {
return input.getVisibility() == View.VISIBLE;
}
};
}
/** Returns a predicate matching views that are instances of the given class. */
@SuppressWarnings("unchecked")
public static Predicate<View> isClass(final Class<? extends View> clazz) {
return (Predicate<View>) (Predicate<?>) Predicates.instanceOf(clazz);
}
/**
 * Tries to extract the description of a drawn drawable from a canvas.
 * Works by drawing onto a Robolectric shadow canvas and reading back its
 * recorded description (test-only technique).
 */
static String getDrawnDrawableDescription(final Drawable drawable) {
final Canvas canvas = new Canvas();
drawable.draw(canvas);
final ShadowCanvas shadowCanvas = Shadows.shadowOf(canvas);
return shadowCanvas.getDescription();
}
// Returns the view's text if it is a TextView, otherwise the empty string
// (never null, so callers can match against it unconditionally).
private static String extractString(final View view) {
if (!(view instanceof TextView)) {
return "";
}
final CharSequence text = ((TextView) view).getText();
return text != null ? text.toString() : "";
}
public static Predicate<View> hasDrawable(final Drawable drawable) {
return new Predicate<View>() {
@Override
public boolean apply(@Nullable final View input) {
|
package dr.app.beagle.evomodel.sitemodel;
import beagle.Beagle;
import dr.app.beagle.evomodel.substmodel.EigenDecomposition;
import dr.app.beagle.evomodel.substmodel.FrequencyModel;
import dr.app.beagle.evomodel.substmodel.SubstitutionModel;
import dr.app.beagle.evomodel.treelikelihood.BufferIndexHelper;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.inference.model.AbstractModel;
import dr.inference.model.Model;
import dr.inference.model.Parameter;
import dr.inference.model.Variable;
/**
* @author Andrew Rambaut
* @author Alexei Drummond
* @version $Id$
*/
/**
 * A branch substitution model that applies one substitution model and one
 * frequency model homogeneously across all branches and categories of the tree.
 *
 * @author Andrew Rambaut
 * @author Alexei Drummond
 * @version $Id$
 */
public class HomogenousBranchSubstitutionModel extends AbstractModel implements BranchSubstitutionModel {
public HomogenousBranchSubstitutionModel(SubstitutionModel substModel, FrequencyModel frequencyModel) {
super("HomogenousBranchSubstitutionModel");
// Register both sub-models so their change events propagate through this model.
this.substModel = substModel;
addModel(substModel);
this.frequencyModel = frequencyModel;
addModel(frequencyModel);
}
/**
 * Homogenous model - returns the same substitution model's eigen decomposition
 * for all branches/categories.
 *
 * @param branchIndex ignored (homogeneous across branches)
 * @param categoryIndex ignored (homogeneous across categories)
 * @return the substitution model's eigen decomposition
 */
public EigenDecomposition getEigenDecomposition(int branchIndex, int categoryIndex) {
return substModel.getEigenDecomposition();
}
/** Pushes this model's (single) eigen decomposition into the BEAGLE instance. */
@Override
public void setEigenDecomposition(Beagle beagle, int eigenIndex, BufferIndexHelper bufferHelper, int dummy) {
EigenDecomposition ed = getEigenDecomposition(eigenIndex, dummy);
beagle.setEigenDecomposition(
// offsetIndex,
eigenIndex,
ed.getEigenVectors(),
ed.getInverseEigenVectors(),
ed.getEigenValues());
}
/** Returns the single substitution model regardless of branch or category. */
public SubstitutionModel getSubstitutionModel(int branchIndex, int categoryIndex) {
return substModel;
}
/**
 * Homogenous model - returns the same frequency model for all categories.
 *
 * @param categoryIndex ignored (homogeneous across categories)
 * @return the state frequencies of the single frequency model
 */
public double[] getStateFrequencies(int categoryIndex) {
return frequencyModel.getFrequencies();
}
/**
 * Homogenous model - returns whether the substitution model can return a complex
 * diagonalization.
 *
 * @return the underlying substitution model's capability flag
 */
public boolean canReturnComplexDiagonalization() {
return substModel.canReturnComplexDiagonalization();
}
/**
 * Homogenous model - always returns model 0.
 *
 * @param tree ignored
 * @param node ignored
 * @return 0, since there is only one model
 */
public int getBranchIndex(final Tree tree, final NodeRef node, int bufferIndex) {
return 0;
}
/** Only one eigen system exists in a homogeneous model. */
public int getEigenCount() {
return 1;
}
private final SubstitutionModel substModel;
private final FrequencyModel frequencyModel;
// Forward changes from either sub-model to this model's listeners.
protected void handleModelChangedEvent(Model model, Object object, int index) {
fireModelChanged();
}
// No variables of our own to react to.
protected void handleVariableChangedEvent(Variable variable, int index, Parameter.ChangeType type) {
}
// No state of our own to store/restore/accept; the sub-models handle their own.
protected void storeState() {
}
protected void restoreState() {
}
protected void acceptState() {
}
/** Delegates transition-matrix computation directly to BEAGLE for the single eigen system. */
public void updateTransitionMatrices( Beagle beagle,
int eigenIndex,
BufferIndexHelper bufferHelper,
final int[] probabilityIndices,
final int[] firstDerivativeIndices,
final int[] secondDervativeIndices,
final double[] edgeLengths,
int count) {
beagle.updateTransitionMatrices(eigenIndex, probabilityIndices, firstDerivativeIndices,
secondDervativeIndices, edgeLengths, count);
}
@Override
public int getExtraBufferCount() {
// TODO Auto-generated method stub
return 0;
}
@Override
public void setFirstBuffer(int bufferCount) {
// TODO Auto-generated method stub
}
}
|
package dr.inference.operators;
import cern.colt.matrix.impl.DenseDoubleMatrix2D;
import cern.colt.matrix.linalg.SingularValueDecomposition;
import dr.inference.model.MatrixParameter;
import dr.inference.model.Parameter;
import dr.math.MathUtils;
import dr.math.matrixAlgebra.CholeskyDecomposition;
import dr.math.matrixAlgebra.IllegalDimension;
import dr.math.matrixAlgebra.SymmetricMatrix;
import dr.util.Transform;
import dr.xml.AbstractXMLObjectParser;
import dr.xml.AttributeRule;
import dr.xml.ElementRule;
import dr.xml.XMLObject;
import dr.xml.XMLObjectParser;
import dr.xml.XMLParseException;
import dr.xml.XMLSyntaxRule;
/**
* @author Guy Baele
* @author Marc A. Suchard
*/
public class AdaptableVarianceMultivariateNormalOperator extends AbstractCoercableOperator {
public static final String AVMVN_OPERATOR = "adaptableVarianceMultivariateNormalOperator";
public static final String SCALE_FACTOR = "scaleFactor";
public static final String BETA = "beta";
public static final String INITIAL = "initial";
public static final String BURNIN = "burnin";
public static final String UPDATE_EVERY = "updateEvery";
public static final String FORM_XTX = "formXtXInverse";
public static final String COEFFICIENT = "coefficient";
public static final boolean DEBUG = false;
private double scaleFactor;
private double beta;
private int iterations, updates, initial, burnin, every;
private final Parameter parameter;
private final Transform[] transformations;
private final int dim;
// private final double constantFactor;
private double[] oldMeans, newMeans;
final double[][] matrix;
private double[][] empirical;
private double[][] cholesky;
// temporary storage, allocated once.
private double[] epsilon;
private double[][] proposal;
/**
 * Constructs the operator from an explicit matrix.
 *
 * @param parameter the parameter block this operator proposes moves for
 * @param transformations per-dimension transforms applied before/after proposing
 * @param scaleFactor initial proposal scale (auto-tuned via the coercion mechanism)
 * @param inMatrix either a variance matrix or a design matrix X (see isVarianceMatrix)
 * @param weight operator weight in the MCMC schedule
 * @param beta mixing weight between the empirical and the initial covariance
 * @param initial number of iterations before adaptation starts
 * @param burnin iterations discarded before accumulating the empirical covariance
 * @param every update interval for the empirical covariance
 * @param mode coercion mode for auto-tuning
 * @param isVarianceMatrix if true, inMatrix is used directly as the variance matrix;
 *        otherwise (X^T X)^-1 is formed from it
 * @throws RuntimeException if inMatrix is rank-deficient or cannot be Cholesky-decomposed
 */
public AdaptableVarianceMultivariateNormalOperator(Parameter parameter, Transform[] transformations, double scaleFactor, double[][] inMatrix,
double weight, double beta, int initial, int burnin, int every, CoercionMode mode, boolean isVarianceMatrix) {
super(mode);
this.scaleFactor = scaleFactor;
this.parameter = parameter;
this.transformations = transformations;
this.beta = beta;
this.iterations = 0;
this.updates = 0;
setWeight(weight);
dim = parameter.getDimension();
// constantFactor = Math.pow(2.38, 2) / ((double) dim); // not necessary because scaleFactor is auto-tuned
this.initial = initial;
this.burnin = burnin;
this.every = every;
// Pre-allocate all working storage once; reused on every proposal.
this.empirical = new double[dim][dim];
this.oldMeans = new double[dim];
this.newMeans = new double[dim];
this.epsilon = new double[dim];
this.proposal = new double[dim][dim];
// Reject rank-deficient matrices early: the SVD rank must equal the column count.
SingularValueDecomposition svd = new SingularValueDecomposition(new DenseDoubleMatrix2D(inMatrix));
if (inMatrix[0].length != svd.rank()) {
throw new RuntimeException("Variance matrix in AdaptableVarianceMultivariateNormalOperator is not of full rank");
}
if (isVarianceMatrix) {
matrix = inMatrix;
} else {
matrix = formXtXInverse(inMatrix);
}
// Cholesky factor L of the covariance is what the sampler actually multiplies by.
try {
cholesky = (new CholeskyDecomposition(matrix)).getL();
} catch (IllegalDimension illegalDimension) {
throw new RuntimeException("Unable to decompose matrix in AdaptableVarianceMultivariateNormalOperator");
}
}
/**
 * Convenience constructor taking the variance as a {@link MatrixParameter};
 * simply unwraps it to a double[][] and delegates to the main constructor.
 */
public AdaptableVarianceMultivariateNormalOperator(Parameter parameter, Transform[] transformations, double scaleFactor,
MatrixParameter varMatrix, double weight, double beta, int initial, int burnin, int every, CoercionMode mode, boolean isVariance) {
this(parameter, transformations, scaleFactor, varMatrix.getParameterAsMatrix(), weight, beta, initial, burnin, every, mode, isVariance);
}
/**
 * Computes (X^T X)^{-1} for an N-by-P design matrix X.
 *
 * @param X design matrix (N rows, P columns)
 * @return the P-by-P inverse of X^T X
 */
private double[][] formXtXInverse(double[][] X) {
    int N = X.length;
    int P = X[0].length;
    double[][] matrix = new double[P][P];
    for (int i = 0; i < P; i++) {
        for (int j = 0; j < P; j++) {
            // BUG FIX: the accumulator was declared `int`, so the compound
            // assignment `total += X[k][i] * X[k][j]` silently truncated each
            // double product to an integer (JLS 15.26.2 implicit narrowing
            // cast), corrupting every entry of X^T X. Use a double accumulator.
            double total = 0.0;
            for (int k = 0; k < N; k++) {
                total += X[k][i] * X[k][j];
            }
            matrix[i][j] = total;
        }
    }
    // Take inverse via the project's symmetric-matrix helper
    matrix = new SymmetricMatrix(matrix).inverse().toComponents();
    return matrix;
}
/**
 * Recursive (online) update of one covariance-matrix entry after a new sample.
 * Combines the previous entry (scaled by number-1), the new cross-product, and
 * the correction between the old and new running means, then renormalises.
 * The exact arithmetic order is deliberate — do not refactor the expression,
 * as floating-point re-association would change results.
 *
 * @param number             sample count after including the new observation (always > 1 here)
 * @param currentMatrixEntry previous covariance value for this (i, j) pair
 * @param values             the newly observed (transformed) parameter vector
 * @param firstIndex         row index i
 * @param secondIndex        column index j
 * @return the updated covariance entry
 */
private double calculateCovariance(int number, double currentMatrixEntry, double[] values, int firstIndex, int secondIndex) {
// number will always be > 1 here
double result = currentMatrixEntry * (number - 1);
result += (values[firstIndex] * values[secondIndex]);
result += ((number - 1) * oldMeans[firstIndex] * oldMeans[secondIndex] - number * newMeans[firstIndex] * newMeans[secondIndex]);
result /= ((double) number);
return result;
}
/**
 * Performs one adaptive multivariate-normal proposal:
 * (1) transforms the current parameter values to the unconstrained scale,
 * (2) after burn-in, updates the running empirical mean and covariance every
 *     {@code every} iterations,
 * (3) after {@code initial} iterations, re-forms the proposal covariance as
 *     (1-beta)*empirical + beta*matrix and refreshes its Cholesky factor,
 * (4) draws a correlated Gaussian step via that Cholesky factor and writes
 *     the back-transformed values into {@code parameter}.
 *
 * @return the accumulated log Jacobian of the per-coordinate transforms
 *         (the Hastings-ratio contribution)
 * @throws OperatorFailedException declared by the operator contract
 */
public double doOperation() throws OperatorFailedException {
iterations++;
if (DEBUG) {
System.err.println("\nIteration: " + iterations);
}
double[] x = parameter.getParameterValues();
// transform each coordinate to the scale on which the Gaussian step is made
double[] transformedX = new double[dim];
for (int i = 0; i < dim; i++) {
transformedX[i] = transformations[i].transform(x[i]);
}
// accumulates the MH-ratio contribution of the transforms
double logJacobian = 0.0;
//change this: make a rule for when iterations == burnin
if (iterations > 1 && iterations > burnin) {
if (DEBUG) {
System.err.println(" iterations > burnin");
}
if (iterations > (burnin+1)) {
if (iterations % every == 0) {
updates++;
// first recalculate the means using the online recursion
for (int i = 0; i < dim; i++) {
newMeans[i] = ((oldMeans[i] * (updates - 1)) + transformedX[i]) / updates;
}
// update the (symmetric) empirical covariance in place
for (int i = 0; i < dim; i++) {
for (int j = i; j < dim; j++) {
empirical[i][j] = calculateCovariance(updates, empirical[i][j], transformedX, i, j);
empirical[j][i] = empirical[i][j];
}
}
}
} else if (iterations == (burnin+1)) {
updates++;
// i.e. iterations == burnin+1: first sample for the adapted covariance C_t.
// This branch is not reached when burnin is 0 (handled by iterations == 1 below).
for (int i = 0; i < dim; i++) {
oldMeans[i] = transformedX[i];
newMeans[i] = transformedX[i];
}
for (int i = 0; i < dim; i++) {
for (int j = 0; j < dim; j++) {
empirical[i][j] = 0.0;
}
}
}
} else if (iterations == 1) {
if (DEBUG) {
System.err.println(" iterations == 1");
}
// very first call: seed the running means, zero the empirical covariance
for (int i = 0; i < dim; i++) {
oldMeans[i] = transformedX[i];
newMeans[i] = transformedX[i];
}
for (int i = 0; i < dim; i++) {
for (int j = 0; j < dim; j++) {
empirical[i][j] = 0.0;
}
}
}
// i.i.d. N(0, scaleFactor^2) innovations, correlated below via the Cholesky factor
for (int i = 0; i < dim; i++) {
epsilon[i] = scaleFactor * MathUtils.nextGaussian();
}
if (iterations > initial) {
if (DEBUG) {
System.err.println(" iterations > initial");
}
if (iterations % every == 0) {
// TODO: For speed, it may not be necessary to update decomposition each and every iteration
// mix the empirical covariance with the fixed prior matrix (symmetric)
for (int i = 0; i < dim; i++) {
for (int j = i; j < dim; j++) { // symmetric matrix
proposal[j][i] = proposal[i][j] = (1 - beta) * // constantFactor * /* auto-tuning using scaleFactor */
empirical[i][j] + beta * matrix[i][j];
}
}
// refresh the Cholesky factor of the mixed covariance
try {
cholesky = (new CholeskyDecomposition(proposal)).getL();
} catch (IllegalDimension illegalDimension) {
throw new RuntimeException("Unable to decompose matrix in AdaptableVarianceMultivariateNormalOperator");
}
}
}
if (DEBUG) {
System.err.println(" Drawing new values");
}
for (int i = 0; i < dim; i++) {
for (int j = i; j < dim; j++) {
transformedX[i] += cholesky[j][i] * epsilon[j];
// caution: decomposition returns lower triangular
}
if (MULTI) {
parameter.setParameterValueQuietly(i, transformations[i].inverse(transformedX[i]));
} else {
parameter.setParameterValue(i, transformations[i].inverse(transformedX[i]));
}
// log Jacobian of the transform pair: old value minus new value
logJacobian += transformations[i].getLogJacobian(x[i]) - transformations[i].getLogJacobian(parameter.getParameterValue(i));
}
if (MULTI) {
parameter.fireParameterChangedEvent(); // Signal once.
}
if (iterations % every == 0) {
if (DEBUG) {
System.err.println(" Copying means");
}
// roll the running means forward for the next update iteration
double[] tmp = oldMeans;
oldMeans = newMeans;
newMeans = tmp; // faster to swap pointers
}
return logJacobian;
}
public static final boolean MULTI = true;
//MCMCOperator INTERFACE
/** Human-readable operator name including the wrapped parameter's name. */
public final String getOperatorName() {
    StringBuilder name = new StringBuilder("adaptableVarianceMultivariateNormal(");
    name.append(parameter.getParameterName()).append(")");
    return name.toString();
}
/** Coercible parameter is log(scaleFactor) so auto-tuning works on an unbounded scale. */
public double getCoercableParameter() {
return Math.log(scaleFactor);
}
/** Inverse of {@code getCoercableParameter}: value is the log of the new scale factor. */
public void setCoercableParameter(double value) {
scaleFactor = Math.exp(value);
}
/** The raw (untransformed) tuning parameter, i.e. the scale factor itself. */
public double getRawParameter() {
return scaleFactor;
}
/** Current proposal scale factor. */
public double getScaleFactor() {
return scaleFactor;
}
/** Target acceptance probability for auto-tuning (0.234). */
public double getTargetAcceptanceProbability() {
return 0.234;
}
/** Lower bound of the acceptable acceptance-rate range. */
public double getMinimumAcceptanceLevel() {
return 0.1;
}
/** Upper bound of the acceptable acceptance-rate range. */
public double getMaximumAcceptanceLevel() {
return 0.4;
}
/** Lower bound of the "good" acceptance-rate window (no suggestion issued inside it). */
public double getMinimumGoodAcceptanceLevel() {
return 0.20;
}
/** Upper bound of the "good" acceptance-rate window (no suggestion issued inside it). */
public double getMaximumGoodAcceptanceLevel() {
return 0.30;
}
/**
 * Suggests a new scaleFactor whenever the observed acceptance rate falls
 * outside the "good" window; returns an empty string otherwise. The original
 * low/high branches produced the identical message, so they are collapsed
 * into a single condition (behaviour unchanged).
 */
public final String getPerformanceSuggestion() {
    double prob = MCMCOperator.Utils.getAcceptanceProbability(this);
    double targetProb = getTargetAcceptanceProbability();
    double sf = OperatorUtils.optimizeWindowSize(scaleFactor, prob, targetProb);
    dr.util.NumberFormatter formatter = new dr.util.NumberFormatter(5);
    if (prob < getMinimumGoodAcceptanceLevel() || prob > getMaximumGoodAcceptanceLevel()) {
        return "Try setting scaleFactor to about " + formatter.format(sf);
    }
    return "";
}
public static XMLObjectParser PARSER = new AbstractXMLObjectParser() {
/** XML element name this parser handles. */
public String getParserName() {
return AVMVN_OPERATOR;
}
/**
 * Builds the operator from XML: reads the tuning attributes, constructs a
 * diagonal initial variance matrix (coefficient^2 / dim on the diagonal),
 * collects any per-coordinate transforms from child elements, validates the
 * matrix/parameter dimensions, and returns the configured operator.
 *
 * @param xo the XML element being parsed
 * @return a new {@link AdaptableVarianceMultivariateNormalOperator}
 * @throws XMLParseException on missing/invalid attributes or dimension mismatch
 */
public Object parseXMLObject(XMLObject xo) throws XMLParseException {
CoercionMode mode = CoercionMode.parseMode(xo);
double weight = xo.getDoubleAttribute(WEIGHT);
double beta = xo.getDoubleAttribute(BETA);
int initial = xo.getIntegerAttribute(INITIAL);
double scaleFactor = xo.getDoubleAttribute(SCALE_FACTOR);
double coefficient = xo.getDoubleAttribute(COEFFICIENT);
// optional attributes with defaults
int burnin = 0;
int every = 1;
if (xo.hasAttribute(BURNIN)) {
burnin = xo.getIntegerAttribute(BURNIN);
}
if (burnin > initial || burnin < 0) {
throw new XMLParseException("burnin must be smaller than the initial period");
}
if (xo.hasAttribute(UPDATE_EVERY)) {
every = xo.getIntegerAttribute(UPDATE_EVERY);
}
if (every <= 0) {
throw new XMLParseException("sample needs to be taken at least every single iteration");
}
if (scaleFactor <= 0.0) {
throw new XMLParseException("scaleFactor must be greater than 0.0");
}
Parameter parameter = (Parameter) xo.getChild(Parameter.class);
boolean formXtXInverse = xo.getAttribute(FORM_XTX, false);
//varMatrix needs to be initialized
int dim = parameter.getDimension();
System.err.println("Dimension: " + dim);
// enforce at least 2*dim pre-adaptation iterations
if (initial <= 2 * dim) {
initial = 2 * dim;
}
// build a dim x dim matrix that is zero everywhere except the diagonal
Parameter[] init = new Parameter[dim];
for (int i = 0; i < dim; i++) {
init[i] = new Parameter.Default(dim, 0.0);
}
for (int i = 0; i < dim; i++) {
init[i].setParameterValue(i, Math.pow(coefficient, 2) / ((double) dim));
}
MatrixParameter varMatrix = new MatrixParameter(null, init);
// default every coordinate to the identity transform, then apply any
// ParsedTransform children over their [start, end) index ranges
Transform[] transformations = new Transform[dim];
for (int i = 0; i < dim; i++) {
transformations[i] = Transform.NONE;
}
for (int i = 0; i < xo.getChildCount(); i++) {
Object child = xo.getChild(i);
if (child instanceof Transform.ParsedTransform) {
Transform.ParsedTransform thisObject = (Transform.ParsedTransform) child;
System.err.println("Transformations:");
for (int j = thisObject.start; j < thisObject.end; ++j) {
transformations[j] = thisObject.transform;
System.err.print(transformations[j].getTransformName() + " ");
}
System.err.println();
}
}
// Make sure varMatrix is square and dim(varMatrix) = dim(parameter)
if (!formXtXInverse) {
if (varMatrix.getColumnDimension() != varMatrix.getRowDimension())
throw new XMLParseException("The variance matrix is not square");
}
if (varMatrix.getColumnDimension() != parameter.getDimension())
throw new XMLParseException("The parameter and variance matrix have differing dimensions");
return new AdaptableVarianceMultivariateNormalOperator(parameter, transformations, scaleFactor, varMatrix, weight, beta, initial, burnin, every, mode, !formXtXInverse);
}
|
package org.endeavourhealth.queuereader;
import OpenPseudonymiser.Crypto;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import org.apache.commons.csv.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.endeavourhealth.common.cache.ObjectMapperPool;
import org.endeavourhealth.common.config.ConfigManager;
import org.endeavourhealth.common.fhir.*;
import org.endeavourhealth.common.utility.FileHelper;
import org.endeavourhealth.common.utility.FileInfo;
import org.endeavourhealth.common.utility.SlackHelper;
import org.endeavourhealth.core.configuration.ConfigDeserialiser;
import org.endeavourhealth.core.configuration.PostMessageToExchangeConfig;
import org.endeavourhealth.core.configuration.QueueReaderConfiguration;
import org.endeavourhealth.core.csv.CsvHelper;
import org.endeavourhealth.core.database.dal.DalProvider;
import org.endeavourhealth.core.database.dal.admin.ServiceDalI;
import org.endeavourhealth.core.database.dal.admin.models.Service;
import org.endeavourhealth.core.database.dal.audit.ExchangeBatchDalI;
import org.endeavourhealth.core.database.dal.audit.ExchangeDalI;
import org.endeavourhealth.core.database.dal.audit.models.*;
import org.endeavourhealth.core.database.dal.eds.PatientLinkDalI;
import org.endeavourhealth.core.database.dal.eds.PatientSearchDalI;
import org.endeavourhealth.core.database.dal.ehr.ResourceDalI;
import org.endeavourhealth.core.database.dal.ehr.models.ResourceWrapper;
import org.endeavourhealth.core.database.dal.publisherTransform.SourceFileMappingDalI;
import org.endeavourhealth.core.database.dal.publisherTransform.models.ResourceFieldMapping;
import org.endeavourhealth.core.database.dal.publisherTransform.models.ResourceFieldMappingAudit;
import org.endeavourhealth.core.database.dal.reference.PostcodeDalI;
import org.endeavourhealth.core.database.dal.reference.models.PostcodeLookup;
import org.endeavourhealth.core.database.dal.subscriberTransform.EnterpriseAgeUpdaterlDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.EnterpriseIdDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.models.EnterpriseAge;
import org.endeavourhealth.core.database.rdbms.ConnectionManager;
import org.endeavourhealth.core.exceptions.TransformException;
import org.endeavourhealth.core.fhirStorage.FhirSerializationHelper;
import org.endeavourhealth.core.fhirStorage.FhirStorageService;
import org.endeavourhealth.core.fhirStorage.JsonServiceInterfaceEndpoint;
import org.endeavourhealth.core.messaging.pipeline.components.PostMessageToExchange;
import org.endeavourhealth.core.queueing.QueueHelper;
import org.endeavourhealth.core.xml.TransformErrorSerializer;
import org.endeavourhealth.core.xml.transformError.TransformError;
import org.endeavourhealth.subscriber.filer.EnterpriseFiler;
import org.endeavourhealth.transform.barts.transforms.PPADDTransformer;
import org.endeavourhealth.transform.barts.transforms.PPNAMTransformer;
import org.endeavourhealth.transform.barts.transforms.PPPHOTransformer;
import org.endeavourhealth.transform.common.*;
import org.endeavourhealth.transform.common.resourceBuilders.PatientBuilder;
import org.endeavourhealth.transform.emis.EmisCsvToFhirTransformer;
import org.endeavourhealth.transform.emis.csv.helpers.EmisCsvHelper;
import org.endeavourhealth.transform.enterprise.json.LinkDistributorConfig;
import org.endeavourhealth.transform.enterprise.transforms.PatientTransformer;
import org.hibernate.internal.SessionImpl;
import org.hl7.fhir.instance.model.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import java.io.*;
import java.lang.reflect.Constructor;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Date;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import java.util.zip.ZipOutputStream;
public class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
public static void main(String[] args) throws Exception {
String configId = args[0];
LOG.info("Initialising config manager");
ConfigManager.initialize("queuereader", configId);
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEncounters")) {
String table = args[1];
fixEncounters(table);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateHomertonSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createHomertonSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateAdastraSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createAdastraSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateVisionSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createVisionSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateTppSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createTppSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateBartsSubset")) {
String sourceDirPath = args[1];
UUID serviceUuid = UUID.fromString(args[2]);
UUID systemUuid = UUID.fromString(args[3]);
String samplePatientsFile = args[4];
createBartsSubset(sourceDirPath, serviceUuid, systemUuid, samplePatientsFile);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsOrgs")) {
String serviceId = args[1];
fixBartsOrgs(serviceId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestPreparedStatements")) {
String url = args[1];
String user = args[2];
String pass = args[3];
String serviceId = args[4];
testPreparedStatements(url, user, pass, serviceId);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateTransformMap")) {
UUID serviceId = UUID.fromString(args[1]);
String table = args[2];
String dstFile = args[3];
createTransforMap(serviceId, table, dstFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("ExportFhirToCsv")) {
UUID serviceId = UUID.fromString(args[1]);
String path = args[2];
exportFhirToCsv(serviceId, path);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestBatchInserts")) {
String url = args[1];
String user = args[2];
String pass = args[3];
String num = args[4];
String batchSize = args[5];
testBatchInserts(url, user, pass, num, batchSize);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("ApplyEmisAdminCaches")) {
applyEmisAdminCaches();
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixSubscribers")) {
fixSubscriberDbs();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems")) {
String serviceId = args[1];
String systemId = args[2];
fixEmisProblems(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestS3Read")) {
String s3Bucket = args[1];
String s3Key = args[2];
String start = args[3];
String len = args[4];
testS3Read(s3Bucket, s3Key, start, len);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems3ForPublisher")) {
String publisherId = args[1];
String systemId = args[2];
fixEmisProblems3ForPublisher(publisherId, UUID.fromString(systemId));
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems3")) {
String serviceId = args[1];
String systemId = args[2];
fixEmisProblems3(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("CheckDeletedObs")) {
String serviceId = args[1];
String systemId = args[2];
checkDeletedObs(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPersonsNoNhsNumber")) {
fixPersonsNoNhsNumber();
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateSubscriberUprnTable")) {
String subscriberConfigName = args[1];
populateSubscriberUprnTable(subscriberConfigName);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertEmisGuid")) {
convertEmisGuids();
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToRabbit")) {
String exchangeName = args[1];
String srcFile = args[2];
Integer throttle = null;
if (args.length > 3) {
throttle = Integer.parseInt(args[3]);
}
postToRabbit(exchangeName, srcFile, throttle);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToProtocol")) {
String srcFile = args[1];
postToProtocol(srcFile);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsPatients")) {
UUID serviceId = UUID.fromString(args[1]);
fixBartsPatients(serviceId);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixDeceasedPatients")) {
String subscriberConfig = args[1];
fixDeceasedPatients(subscriberConfig);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPseudoIds")) {
String subscriberConfig = args[1];
int threads = Integer.parseInt(args[2]);
fixPseudoIds(subscriberConfig, threads);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("MoveS3ToAudit")) {
moveS3ToAudit();
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertExchangeBody")) {
String systemId = args[1];
convertExchangeBody(UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixReferrals")) {
fixReferralRequests();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateNewSearchTable")) {
String table = args[1];
populateNewSearchTable(table);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsEscapes")) {
String filePath = args[1];
fixBartsEscapedFiles(filePath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToInbound")) {
String serviceId = args[1];
String systemId = args[2];
String filePath = args[3];
postToInboundFromFile(UUID.fromString(serviceId), UUID.fromString(systemId), filePath);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixDisabledExtract")) {
String sharedStoragePath = args[1];
String tempDir = args[2];
String systemId = args[3];
String serviceOdsCode = args[4];
fixDisabledEmisExtract(serviceOdsCode, systemId, sharedStoragePath, tempDir);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestSlack")) {
testSlack();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToInbound")) {
String serviceId = args[1];
boolean all = Boolean.parseBoolean(args[2]);
postToInbound(UUID.fromString(serviceId), all);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPatientSearch")) {
String serviceId = args[1];
String systemId = null;
if (args.length > 2) {
systemId = args[2];
}
if (serviceId.equalsIgnoreCase("All")) {
fixPatientSearchAllServices(systemId);
} else {
fixPatientSearch(serviceId, systemId);
}
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixSlotReferences")) {
String serviceId = args[1];
try {
UUID serviceUuid = UUID.fromString(serviceId);
fixSlotReferences(serviceUuid);
} catch (Exception ex) {
fixSlotReferencesForPublisher(serviceId);
}
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestS3VsMySQL")) {
UUID serviceUuid = UUID.fromString(args[1]);
int count = Integer.parseInt(args[2]);
int sqlBatchSize = Integer.parseInt(args[3]);
String bucketName = args[4];
testS3VsMySql(serviceUuid, count, sqlBatchSize, bucketName);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("Exit")) {
String exitCode = args[1];
LOG.info("Exiting with error code " + exitCode);
int exitCodeInt = Integer.parseInt(exitCode);
System.exit(exitCodeInt);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("RunSql")) {
String host = args[1];
String username = args[2];
String password = args[3];
String sqlFile = args[4];
runSql(host, username, password, sqlFile);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateProtocolQueue")) {
String serviceId = null;
if (args.length > 1) {
serviceId = args[1];
}
String startingExchangeId = null;
if (args.length > 2) {
startingExchangeId = args[2];
}
populateProtocolQueue(serviceId, startingExchangeId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindEncounterTerms")) {
String path = args[1];
String outputPath = args[2];
findEncounterTerms(path, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindEmisStartDates")) {
String path = args[1];
String outputPath = args[2];
findEmisStartDates(path, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ExportHl7Encounters")) {
String sourceCsvPpath = args[1];
String outputPath = args[2];
exportHl7Encounters(sourceCsvPpath, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixExchangeBatches")) {
fixExchangeBatches();
System.exit(0);
}*/
/*if (args.length >= 0
&& args[0].equalsIgnoreCase("FindCodes")) {
findCodes();
System.exit(0);
}*/
/*if (args.length >= 0
&& args[0].equalsIgnoreCase("FindDeletedOrgs")) {
findDeletedOrgs();
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("LoadBartsData")) {
String serviceId = args[1];
String systemId = args[2];
String dbUrl = args[3];
String dbUsername = args[4];
String dbPassword = args[5];
String onlyThisFileType = null;
if (args.length > 6) {
onlyThisFileType = args[6];
}
loadBartsData(serviceId, systemId, dbUrl, dbUsername, dbPassword, onlyThisFileType);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateBartsDataTables")) {
createBartsDataTables();
System.exit(0);
}
if (args.length != 1) {
LOG.error("Usage: queuereader config_id");
return;
}
LOG.info("
LOG.info("EDS Queue Reader " + configId);
LOG.info("
LOG.info("Fetching queuereader configuration");
String configXml = ConfigManager.getConfiguration(configId);
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
/*LOG.info("Registering shutdown hook");
registerShutdownHook();*/
// Instantiate rabbit handler
LOG.info("Creating EDS queue reader");
RabbitHandler rabbitHandler = new RabbitHandler(configuration, configId);
// Begin consume
rabbitHandler.start();
LOG.info("EDS Queue reader running (kill file location " + TransformConfig.instance().getKillFileLocation() + ")");
}
/**
 * Copies resource-mapping audit files from the S3 audit bucket into the
 * database. Each S3 object is a single-entry zip whose path encodes
 * service id / resource type / resource id / version; entries are written
 * in small batches via the SourceFileMapping DAL.
 *
 * Any failure is logged and swallowed (best-effort maintenance routine).
 */
private static void moveS3ToAudit() {
    LOG.info("Moving S3 to Audit");
    try {
        // list S3 contents
        List<FileInfo> files = FileHelper.listFilesInSharedStorageWithInfo("s3://discoveryaudit/audit");
        LOG.debug("Found " + files.size() + " audits");

        SourceFileMappingDalI db = DalProvider.factorySourceFileMappingDal();

        int done = 0;
        Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>();

        for (FileInfo info : files) {
            String path = info.getFilePath();

            // BUG FIX: the streams were previously closed manually (and the
            // ZipInputStream not at all), leaking on any exception; use
            // try-with-resources so both are always closed.
            String json;
            try (InputStream inputStream = FileHelper.readFileFromSharedStorage(path);
                 ZipInputStream zis = new ZipInputStream(inputStream)) {

                ZipEntry entry = zis.getNextEntry();
                if (entry == null) {
                    throw new Exception("No entry in zip file " + path);
                }
                byte[] entryBytes = IOUtils.toByteArray(zis);
                // NOTE(review): uses the platform default charset, matching the
                // original behaviour — confirm whether these audits are UTF-8
                json = new String(entryBytes);
            }

            ResourceFieldMappingAudit audit = ResourceFieldMappingAudit.readFromJson(json);

            // rebuild the ResourceWrapper from the S3 path components:
            // .../<serviceId>/<resourceType>/<resourceId>/<version>.<ext>
            ResourceWrapper wrapper = new ResourceWrapper();
            String versionStr = FilenameUtils.getBaseName(path);
            wrapper.setVersion(UUID.fromString(versionStr));
            wrapper.setCreatedAt(info.getLastModified());

            File f = new File(path);
            f = f.getParentFile();
            wrapper.setResourceId(UUID.fromString(f.getName()));
            f = f.getParentFile();
            wrapper.setResourceType(f.getName());
            f = f.getParentFile();
            wrapper.setServiceId(UUID.fromString(f.getName()));

            batch.put(wrapper, audit);
            // flush in small batches (threshold of 6, as before)
            if (batch.size() > 5) {
                db.saveResourceMappings(batch);
                batch.clear();
            }

            done++;
            if (done % 1000 == 0) {
                LOG.debug("Done " + done + " / " + files.size());
            }
        }

        // flush any remainder
        if (!batch.isEmpty()) {
            db.saveResourceMappings(batch);
            batch.clear();
        }

        LOG.info("Finished Moving S3 to Audit");
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/*private static void convertEmisGuids() {
LOG.debug("Converting Emis Guid");
try {
Map<String, String> map = new HashMap<>();
//this list of guids and dates is based off the live Emis extracts, giving the most recent bulk date for each organisation
//only practices where the extract started before the move to AWS and where the extract was disabled and re-bulked need to be in here.
//Practices disabled and re-bulked since the move to AWS are handled differently.
map.put("{DD31E915-7076-46CF-99CD-8378AB588B69}", "20/07/2017");
map.put("{87A8851C-3DA4-4BE0-869C-3BF6BA7C0612}", "15/10/2017");
map.put("{612DCB3A-5BE6-4D50-909B-F0F20565F9FC}", "09/08/2017");
map.put("{15667F8D-46A0-4A87-9FA8-0C56B157A0A9}", "05/05/2017");
map.put("{3CFEFBF9-B856-4A40-A39A-4EB6FA39295E}", "31/01/2017");
map.put("{3F481450-AD19-4793-B1F0-40D5C2C57EF7}", "04/11/2017");
map.put("{83939542-20E4-47C5-9883-BF416294BB22}", "13/10/2017");
map.put("{73AA7E3A-4331-4167-8711-FE07DDBF4657}", "15/10/2017");
map.put("{3B703CCF-C527-4EC8-A802-00D3B1535DD0}", "01/02/2017");
map.put("{ED442CA3-351F-43E4-88A2-2EEACE39A402}", "13/10/2017");
map.put("{86537B5B-7CF3-4964-8906-7C10929FBC20}", "13/05/2017");
map.put("{9A4518C4-82CE-4509-8039-1B5F49F9C1FA}", "12/08/2017");
map.put("{16D7F8F9-4A35-44B1-8F1D-DD0162584684}", "11/07/2017");
map.put("{D392C499-345C-499B-898C-93F2CB8CC1B9}", "15/10/2017");
map.put("{5B87882A-0EE8-4233-93D0-D2F5F4F94040}", "15/03/2017");
map.put("{CFE3B460-9058-47FB-BF1D-6BEC13A2257D}", "19/04/2017");
map.put("{7B03E105-9275-47CC-8022-1469FE2D6AE4}", "20/04/2017");
map.put("{94470227-587C-47D7-A51F-9893512424D8}", "27/04/2017");
map.put("{734F4C99-6326-4CA4-A22C-632F0AC12FFC}", "17/10/2017");
map.put("{03C5B4B4-1A70-45F8-922E-135C826D48E0}", "20/04/2017");
map.put("{1BB17C3F-CE80-4261-AF6C-BE987E3A5772}", "09/05/2017");
map.put("{16F6DD42-2140-4395-95D5-3FA50E252896}", "20/04/2017");
map.put("{3B6FD632-3FFB-48E6-9775-287F6C486752}", "15/10/2017");
map.put("{F987F7BD-E19C-46D2-A446-913489F1BB7A}", "05/02/2017");
map.put("{BE7CC1DC-3CAB-4BB1-A5A2-B0C854C3B78E}", "06/07/2017");
map.put("{303EFA4E-EC8F-4CBC-B629-960E4D799E0D}", "15/10/2017");
map.put("{5EE8FD1F-F23A-4209-A1EE-556F9350C900}", "01/02/2017");
map.put("{04F6C555-A298-45F1-AC5E-AC8EBD2BB720}", "17/10/2017");
map.put("{67383254-F7F1-4847-9AA9-C7DCF32859B8}", "17/10/2017");
map.put("{31272E4E-40E0-4103-ABDC-F40A7B75F278}", "19/10/2017");
map.put("{09CA2E3B-7143-4999-9934-971F3F2E6D8C}", "15/10/2017");
map.put("{0527BCE2-4315-47F2-86A1-2E9F3E50399B}", "15/10/2017");
map.put("{16DD14B5-D1D5-4B0C-B886-59AC4DACDA7A}", "04/07/2017");
map.put("{411D0A79-6913-473C-B486-C01F6430D8A6}", "21/09/2017");
map.put("{0862FADA-594A-415E-B971-7A4312E0A58C}", "10/06/2017");
map.put("{249C3F3C-24F0-44CE-97A9-B535982BD70C}", "15/10/2017");
map.put("{5D7A1915-6E22-4B20-A8AE-4768C06D3BBF}", "28/09/2017"); //Barts community
map.put("{131AE556-8B50-4C17-9D7D-A4B19F7B1FEA}", "15/10/2017"); //Aberfeldy practice F84698
map.put("{C0D2D0DF-EF78-444D-9A6D-B9EDEF5EF350}", "13/10/2017");
map.put("{F174B354-4156-4BCB-960F-35D0145075EA}", "01/02/2017");
map.put("{38600D63-1DE0-4910-8ED6-A38DC28A9DAA}", "19/02/2018"); //THE SPITALFIELDS PRACTICE (CDB 16);F84081
map.put("{B3ECA2DE-D926-4594-B0EA-CF2F28057CE1}", "19/10/2017");
map.put("{18F7C28B-2A54-4F82-924B-38C60631FFFA}", "04/02/2018"); //Rowans Surgery (CDB 18174);H85035
map.put("{16FB5EE8-5039-4068-BC42-1DB56DC2A530}", "08/06/2017");
map.put("{4BA4A5AC-7B25-40B2-B0EA-135702A72F9D}", "15/10/2017");
map.put("{01B8341F-BC8F-450E-8AFA-4CDA344A5009}", "15/10/2017");
map.put("{E6FBEA1C-BDA2-40B7-A461-C262103F08D7}", "08/06/2017");
map.put("{141C68EB-1BC8-4E99-A9D9-0E63A8944CA9}", "15/10/2017");
map.put("{A3EA804D-E7EB-43EE-8F1F-E860F6337FF7}", "15/10/2017");
map.put("{771B42CC-9C0C-46E2-8143-76F04AF91AD5}", "13/11/2017"); //cranwich road
map.put("{16EA8D5C-C667-4818-B629-5D6F4300FEEF}", "11/05/2017");
map.put("{29E51964-C94D-4CB4-894E-EB18E27DEFC1}", "15/10/2017");
map.put("{3646CCA5-7FE4-4DFE-87CD-DA3CE1BA885D}", "27/09/2017");
map.put("{3EC82820-702F-4218-853B-D3E5053646A8}", "05/05/2017");
map.put("{37F3E676-B203-4329-97F8-2AF5BFEAEE5A}", "19/10/2017");
map.put("{A0E3208B-95E9-4284-9B5A-D4D387CCC9F9}", "07/06/2017");
map.put("{0BEAF1F0-9507-4AC2-8997-EC0BA1D0247E}", "19/10/2017");
map.put("{071A50E7-1764-4210-94EF-6A4BF96CF753}", "21/02/2017");
map.put("{0C1983D8-FB7D-4563-84D0-1F8F6933E786}", "20/07/2017");
map.put("{871FEEB2-CE30-4603-B9A3-6FA6CC47B5D4}", "15/10/2017");
map.put("{42906EBE-8628-486D-A52F-27B935C9937A}", "01/02/2017");
map.put("{1AB7ABF3-2572-4D07-B719-CFB2FE3AAC80}", "15/10/2017");
map.put("{E312A5B7-13E7-4E43-BE35-ED29F6216D3C}", "20/04/2017");
map.put("{55E60891-8827-40CD-8011-B0223D5C8970}", "15/10/2017");
map.put("{03A63F52-7FEE-4592-9B54-83CEBCF67B5D}", "26/04/2017");
map.put("{DB39B649-B48D-4AC2-BAB1-AC807AABFAC4}", "15/10/2017");
map.put("{0AF9B2AF-A0FB-40B0-BA05-743BA6845DB1}", "26/08/2017");
map.put("{A7600092-319C-4213-92C2-738BEEFC1609}", "31/01/2017");
map.put("{5A1AABA9-7E96-41E7-AF18-E02F4CF1DFB6}", "15/10/2017");
map.put("{7D8CE31D-66AA-4D6A-9EFD-313646BD1D73}", "15/10/2017");
map.put("{03EA4A79-B6F1-4524-9D15-992B47BCEC9A}", "15/10/2017");
map.put("{4588C493-2EA3-429A-8428-E610AE6A6D76}", "28/09/2017"); //Barts community
map.put("{B13F3CC9-C317-4E0D-9C57-C545E4A53CAF}", "15/10/2017");
map.put("{463DA820-6EC4-48CB-B915-81B31AFBD121}", "13/10/2017");
map.put("{16F0D65C-B2A8-4186-B4E7-BBAF4390EC55}", "13/10/2017");
map.put("{0039EF15-2DCF-4F70-B371-014C807210FD}", "24/05/2017");
map.put("{E132BF05-78D9-4E4B-B875-53237E76A0FA}", "19/10/2017");
map.put("{3DFC2DA6-AD8C-4836-945D-A6F8DB22AA49}", "15/10/2017");
map.put("{BCB43B1D-2857-4186-918B-460620F98F81}", "13/10/2017");
map.put("{E134C74E-FA3E-4E14-A4BB-314EA3D3AC16}", "15/10/2017");
map.put("{C0F40044-C2CA-4D1D-95D3-553B29992385}", "26/08/2017");
map.put("{B174A018-538D-4065-838C-023A245B53DA}", "14/02/2017");
map.put("{43380A69-AE7D-4ED7-B014-0708675D0C02}", "08/06/2017");
map.put("{E503F0E0-FE56-4CEF-BAB5-0D25B834D9BD}", "13/10/2017");
map.put("{08946F29-1A53-4AF2-814B-0B8758112F21}", "07/02/2018"); //NEWHAM MEDICAL CENTRE (CDB 3461);F84669
map.put("{09857684-535C-4ED6-8007-F91F366611C6}", "19/10/2017");
map.put("{C409A597-009A-4E11-B828-A595755DE0EA}", "17/10/2017");
map.put("{58945A1C-2628-4595-8F8C-F75D93045949}", "15/10/2017");
map.put("{16FF2874-20B0-4188-B1AF-69C97055AA60}", "17/10/2017");
map.put("{2C91E9DA-3F92-464E-B6E6-61D3DE52E62F}", "15/10/2017");
map.put("{16E7AD27-2AD9-43C0-A473-1F39DF93E981}", "10/06/2017");
map.put("{A528478D-65DB-435C-9E98-F8BDB49C9279}", "20/04/2017");
map.put("{A2BDB192-E79C-44C5-97A2-1FD4517C456F}", "21/08/2017");
map.put("{73DFF193-E917-4DBC-B5CF-DD2797B29377}", "15/10/2017");
map.put("{62825316-9107-4E2C-A22C-86211B4760DA}", "13/10/2017");
map.put("{006E8A30-2A45-4DBE-91D7-1C53FADF38B1}", "28/01/2018"); //The Lawson Practice (CDB 4334);F84096
map.put("{E32AA6A6-46B1-4198-AA13-058038AB8746}", "13/10/2017");
map.put("{B51160F1-79E3-4BA7-AA3D-1112AB341146}", "30/09/2017");
map.put("{234503E5-56B4-45A0-99DA-39854FBE78E9}", "01/02/2017");
map.put("{7D1852DA-E264-4599-B9B4-8F40207F967D}", "09/10/2017");
map.put("{44716213-7FEE-4247-A09E-7285BD6B69C6}", "13/10/2017");
map.put("{19BCC870-2704-4D21-BA7B-56F2F472AF35}", "15/10/2017");
map.put("{FEF842DA-FD7C-480F-945A-D097910A81EB}", "13/10/2017");
map.put("{1C980E19-4A39-4ACD-BA8A-925D3E525765}", "13/10/2017");
map.put("{AABDDC3A-93A4-4A87-9506-AAF52E74012B}", "07/02/2018"); //DR N DRIVER AND PARTNERS (CDB 4419);F84086
map.put("{90C2959C-0C2D-43DC-A81B-4AD594C17999}", "20/04/2017");
map.put("{1F1669CF-1BB0-47A7-8FBF-BE65651644C1}", "15/10/2017");
map.put("{C1800BE8-4C1D-4340-B0F2-7ED208586ED3}", "15/10/2017");
map.put("{55A94703-4582-46FB-808A-1990E9CBCB6F}", "19/02/2018"); //Stamford Hill Group Practice (CDB 56);F84013
map.put("{D4996E62-268F-4759-83A6-7A68D0B38CEC}", "27/04/2017");
map.put("{3C843BBA-C507-4A95-9934-1A85B977C7B8}", "01/02/2017");
map.put("{2216253B-705D-4C46-ADB3-ED48493D6A39}", "03/02/2018"); //RIVERSIDE MEDICAL PRACTICE (CDB 14675);Y01962
map.put("{00123F97-4557-44AD-81B5-D9902DD72EE9}", "28/04/2017");
map.put("{E35D4D12-E7D2-484B-BFF6-4653B3FED228}", "15/10/2017");
map.put("{6D8B4D28-838B-4915-A148-6FEC2CEBCE77}", "05/07/2017");
map.put("{188D5B4D-4BF6-46E3-AF11-3AD32C68D251}", "19/10/2017");
map.put("{16F7DDE1-3763-4D3A-A58D-F12F967718CF}", "02/11/2017");
map.put("{03148933-6E1C-4A8A-A6D2-A3D488E14DDD}", "30/12/2017");
map.put("{16DE1A3C-875B-4AB2-B227-8A42604E029C}", "05/11/2017");
map.put("{D628D1BC-D02E-4101-B8CD-5B3DB2D06FC1}", "05/05/2017");
map.put("{1EA6259A-6A49-46DB-991D-D604675F87E2}", "15/10/2017");
map.put("{817F9B46-AEE0-45D5-95E3-989F75C4844E}", "20/04/2017");
map.put("{1C422471-F52A-4C30-8D23-140BEB7AAEFC}", "15/08/2017");
map.put("{A6467E73-0F15-49D6-AFAB-4DFB487E7963}", "10/05/2017");
map.put("{CC7D1781-1B85-4AD6-A5DD-9AD5E092E8DB}", "13/10/2017");
map.put("{167CD5C8-148F-4D78-8997-3B22EC0AF6B6}", "13/10/2017");
map.put("{9DD5D2CE-2585-49D8-AF04-2CB1BD137594}", "15/10/2017");
map.put("{D6696BB5-DE69-49D1-BC5E-C56799E42640}", "07/02/2018"); //BOLEYN MEDICAL CENTRE (CDB 4841);F84050
map.put("{169375A9-C3AB-4C5E-82B0-DFF7656AD1FA}", "20/04/2017");
map.put("{0A8ECFDE-95EE-4811-BC05-668D49F5C799}", "19/11/2017");
map.put("{79C898A1-BB92-48F9-B0C3-6725370132B5}", "20/10/2017");
map.put("{472AC9BA-AFFE-4E81-81CA-40DD8389784D}", "27/04/2017");
map.put("{00121CB7-76A6-4D57-8260-E9CA62FFCD77}", "13/10/2017");
map.put("{0FCBA0A7-7CAB-4E75-AC81-5041CD869CA1}", "15/10/2017");
map.put("{00A9C32D-2BB2-4A20-842A-381B3F2031C0}", "19/10/2017");
map.put("{26597C5A-3E29-4960-BE11-AC75D0430615}", "03/05/2017");
map.put("{D945FEF7-F5EF-422B-AB35-6937F9792B54}", "15/10/2017");
map.put("{16D685C6-130A-4B19-BCA9-90AC7DC16346}", "08/07/2017");
map.put("{F09E9CEF-2615-4C9D-AA3D-79E0AB10D0B3}", "13/10/2017");
map.put("{CD7EF748-DB88-49CF-AA6E-24F65029391F}", "15/10/2017");
map.put("{B22018CF-2B52-4A1A-9F6A-CEA13276DB2E}", "19/10/2017");
map.put("{0DF8CFC7-5DE6-4DDB-846A-7F28A2740A00}", "02/12/2017");
map.put("{50F439E5-DB18-43A0-9F25-825957013A07}", "11/01/2018"); //DR PI ABIOLA (CDB 5681);F84631
map.put("{00A3BA25-21C6-42DE-82AA-55FF0D85A6C3}", "31/10/2018"); //MARKET STREET HEALTH GROUP (CDB 381);F84004
map.put("{77B59D29-0FD9-4737-964F-5DBA49D94AB6}", "31/10/2018"); //Star Lane Medical Centre (CDB 40);F84017
map.put("{91239362-A105-4DEA-8E8E-239C3BCEDFD2}", "11/01/2018"); //BEECHWOOD MEDICAL CENTRE (CDB 5661);F84038
map.put("{53A113F5-6E3B-410F-A473-53E38A79335B}", "01/06/2018"); //ELFT Community RWKGY CDB 25362
map.put("{164BE8EC-E2D5-40DE-A5FC-25E058A5C47E}", "17/10/2018"); //Haiderian Medical Centre F82002
map.put("{164CE1B0-F7B3-44AF-B1E4-3DA6C64DEA4C}", "26/11/2018"); //THE GREEN WOOD PRACTICE F82007
map.put("{A30A4BB7-B17B-11D9-AD5F-00D0B77FCBFC}", "26/11/2018"); //Tulasi Medical Practice F82660
LOG.debug("Starting with map size " + map.size());
Map<String, String> hmGuidToOdsMap = new HashMap<>();
UUID systemId = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
UUID serviceId = service.getId();
String ods = service.getLocalId();
String orgGuid = null;
List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceId, systemId, 5);
for (Exchange exchange: exchanges) {
String exchangeBody = exchange.getBody();
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
if (!files.isEmpty()) {
ExchangePayloadFile first = files.get(0);
String path = first.getPath();
if (path.indexOf("EMIS_CUSTOM") > -1) {
continue;
}
File f = new File(path);
f = f.getParentFile(); //org GUID
orgGuid = f.getName();
break;
}
}
if (orgGuid == null) {
LOG.error("Failed to find OrgGuid for " + service.getName() + " " + ods);
} else {
hmGuidToOdsMap.put(orgGuid, ods);
}
}
//create new code
for (String orgGuid: map.keySet()) {
String dateStr = map.get(orgGuid);
String odsCode = hmGuidToOdsMap.get(orgGuid);
if (Strings.isNullOrEmpty(odsCode)) {
LOG.error("Missing ODS code for " + orgGuid);
} else {
System.out.println("map.put(\"" + odsCode + "\", \"" + dateStr + "\");");
}
}
LOG.debug("Finished Converting Emis Guid");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * Benchmarks writing resource field mapping audits to S3 against inserting the same rows
 * into a MySQL table, logging the elapsed time of each approach.
 *
 * @param serviceUuid  service whose publisher transform DB supplies the sample audit JSON
 * @param count        maximum number of rows to read from the DB, or -1 for no limit
 * @param sqlBatchSize number of rows per JDBC batch/commit in the SQL half of the test
 * @param bucketName   S3 bucket the test objects are written to (under the auditTest/ prefix)
 */
private static void testS3VsMySql(UUID serviceUuid, int count, int sqlBatchSize, String bucketName) {
    LOG.debug("Testing S3 vs MySQL for service " + serviceUuid);
    try {
        //retrieve some audit JSON from the DB
        EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection connection = session.connection();

        String sql = "select resource_id, resource_type, version, mappings_json"
                + " from resource_field_mappings"
                + " where mappings_json != '[]'";
        if (count > -1) {
            //note the leading space - without it the generated SQL was invalid ("...'[]'limit 5;")
            sql += " limit " + count + ";";
        }

        Statement statement = connection.createStatement();
        statement.setFetchSize(1000);
        ResultSet rs = statement.executeQuery(sql);

        List<ResourceFieldMapping> list = new ArrayList<>();
        while (rs.next()) {
            int col = 1;
            String resourceId = rs.getString(col++);
            String resourceType = rs.getString(col++);
            String version = rs.getString(col++);
            String json = rs.getString(col++);

            ResourceFieldMapping obj = new ResourceFieldMapping();
            obj.setResourceId(UUID.fromString(resourceId));
            obj.setResourceType(resourceType);
            obj.setVersion(UUID.fromString(version));
            obj.setResourceField(json);
            list.add(obj);
        }
        rs.close();
        statement.close();
        entityManager.close();

        int done = 0;

        //test writing to S3
        long s3Start = System.currentTimeMillis();
        LOG.debug("Doing S3 test");

        //build the client ONCE, outside the loop - previously it was rebuilt per object,
        //which both slowed the loop and skewed the timing this method exists to measure
        //ProfileCredentialsProvider credentialsProvider = new ProfileCredentialsProvider();
        DefaultAWSCredentialsProviderChain credentialsProvider = DefaultAWSCredentialsProviderChain.getInstance();
        AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder
                .standard()
                .withCredentials(credentialsProvider)
                .withRegion(Regions.EU_WEST_2);
        AmazonS3 s3Client = clientBuilder.build();

        for (int i=0; i<list.size(); i++) {
            ResourceFieldMapping mapping = list.get(i);

            String entryName = mapping.getVersion().toString() + ".json";
            String keyName = "auditTest/" + serviceUuid + "/" + mapping.getResourceType() + "/" + mapping.getResourceId() + "/" + mapping.getVersion() + ".zip";
            String jsonStr = mapping.getResourceField();

            //may as well zip the data, since it will compress well
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            ZipOutputStream zos = new ZipOutputStream(baos);
            zos.putNextEntry(new ZipEntry(entryName));
            zos.write(jsonStr.getBytes());
            zos.flush();
            zos.close();

            byte[] bytes = baos.toByteArray();
            ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);

            //server-side AES-256 encryption; content length must be set so the SDK doesn't buffer the stream
            ObjectMetadata objectMetadata = new ObjectMetadata();
            objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
            objectMetadata.setContentLength(bytes.length);

            PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, byteArrayInputStream, objectMetadata);
            s3Client.putObject(putRequest);

            done ++;
            if (done % 1000 == 0) {
                LOG.debug("Done " + done + " / " + list.size());
            }
        }

        long s3End = System.currentTimeMillis();
        LOG.debug("S3 took " + (s3End - s3Start) + " ms");

        //test inserting into a DB
        long sqlStart = System.currentTimeMillis();
        LOG.debug("Doing SQL test");

        sql = "insert into drewtest.json_speed_test (resource_id, resource_type, created_at, version, mappings_json) values (?, ?, ?, ?, ?)";

        entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
        session = (SessionImpl) entityManager.getDelegate();
        connection = session.connection();
        PreparedStatement ps = connection.prepareStatement(sql);
        entityManager.getTransaction().begin();

        done = 0;
        int currentBatchSize = 0;

        for (int i=0; i<list.size(); i++) {
            ResourceFieldMapping mapping = list.get(i);

            int col = 1;
            ps.setString(col++, mapping.getResourceId().toString());
            ps.setString(col++, mapping.getResourceType());
            ps.setDate(col++, new java.sql.Date(System.currentTimeMillis()));
            ps.setString(col++, mapping.getVersion().toString());
            ps.setString(col++, mapping.getResourceField());
            ps.addBatch();
            currentBatchSize ++;

            //flush each full batch (and the final partial one) then re-open the
            //connection to mirror what would happen normally
            if (currentBatchSize >= sqlBatchSize
                    || i+1 == list.size()) {
                ps.executeBatch();
                entityManager.getTransaction().commit();

                //mirror what would happen normally
                ps.close();
                entityManager.close();

                if (i+1 < list.size()) {
                    entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
                    session = (SessionImpl) entityManager.getDelegate();
                    connection = session.connection();
                    ps = connection.prepareStatement(sql);
                    entityManager.getTransaction().begin();
                }
            }

            done ++;
            if (done % 1000 == 0) {
                LOG.debug("Done " + done + " / " + list.size());
            }
        }

        long sqlEnd = System.currentTimeMillis();
        LOG.debug("SQL took " + (sqlEnd - sqlStart) + " ms");

        LOG.debug("Finished Testing S3 vs MySQL for service " + serviceUuid);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Emits DROP/CREATE TABLE SQL for every known Barts file type by delegating to
 * createBartsDataTable(String) for each one in turn.
 */
private static void createBartsDataTables() {
    LOG.debug("Creating Barts data tables");
    try {
        //every Barts file type we want a staging table for; a few are deliberately excluded
        String[] fileTypes = {
                "AEATT",
                "Birth",
                //"BulkDiagnosis",
                //"BulkProblem",
                //"BulkProcedure",
                "CLEVE",
                "CVREF",
                "Diagnosis",
                "ENCINF",
                "ENCNT",
                "FamilyHistory",
                "IPEPI",
                "IPWDS",
                "LOREF",
                "NOMREF",
                "OPATT",
                "ORGREF",
                "PPADD",
                "PPAGP",
                "PPALI",
                "PPINF",
                "PPNAM",
                "PPPHO",
                "PPREL",
                "Pregnancy",
                "Problem",
                "PROCE",
                "Procedure",
                "PRSNLREF",
                "SusEmergency",
                "SusInpatient",
                "SusOutpatient",
                //"Tails", //TODO - have three separate tails files
                "EventCode",
                "EventSetCanon",
                "EventSet",
                "EventSetExplode",
                "BlobContent",
                "SusInpatientTail",
                "SusOutpatientTail",
                "SusEmergencyTail",
                "AEINV",
                "AETRE",
                "OPREF",
                "STATREF",
                "RTTPE",
                "PPATH",
                "DOCRP",
                "SCHAC",
                "EALEN",
                "DELIV",
                "EALOF",
                "SusEmergencyCareDataSet",
                "SusEmergencyCareDataSetTail"
        };

        for (String fileType : fileTypes) {
            createBartsDataTable(fileType);
        }

        LOG.debug("Finished Creating Barts data tables");
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Prints DROP and CREATE TABLE SQL for the staging table matching the given Barts file type.
 * Column names and sizes are derived from the parser class for the type; if no parser class
 * exists a SQL comment saying so is printed instead.
 *
 * @param fileType Barts file type, matching a class name in org.endeavourhealth.transform.barts.schema
 */
private static void createBartsDataTable(String fileType) throws Exception {
    //instantiate the parser for this file type via reflection, so we can read its column list
    ParserI parser = null;
    try {
        String clsName = "org.endeavourhealth.transform.barts.schema." + fileType;
        Class cls = Class.forName(clsName);

        //now construct an instance of the parser for the file we've found
        Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
        parser = constructor.newInstance(null, null, null, null, null);

    } catch (ClassNotFoundException cnfe) {
        System.out.println("-- No parser for file type [" + fileType + "]");
        return;
    }

    System.out.println("-- " + fileType);

    String table = fileType.replace(" ", "_");

    String dropSql = "DROP TABLE IF EXISTS `" + table + "`;";
    System.out.println(dropSql);

    //build the CREATE TABLE with a StringBuilder rather than O(n^2) String concatenation
    StringBuilder sql = new StringBuilder();
    sql.append("CREATE TABLE `").append(table).append("` (");
    sql.append("file_name varchar(100)");

    if (parser instanceof AbstractFixedParser) {
        //fixed-width files declare explicit field lengths, so size each column to match
        AbstractFixedParser fixedParser = (AbstractFixedParser)parser;
        List<FixedParserField> fields = fixedParser.getFieldList();
        for (FixedParserField field: fields) {
            String col = field.getName();
            int len = field.getFieldlength();
            sql.append(", ");
            sql.append(col.replace(" ", "_").replace("#", "").replace("/", ""));
            sql.append(" varchar(");
            sql.append(len);
            sql.append(")");
        }
    } else {
        //CSV files: a few known-huge columns get mediumtext, date/time columns are short,
        //everything else defaults to varchar(255)
        List<String> cols = parser.getColumnHeaders();
        for (String col: cols) {
            sql.append(", ");
            sql.append(col.replace(" ", "_").replace("#", "").replace("/", ""));
            if (col.equals("BLOB_CONTENTS")
                    || col.equals("VALUE_LONG_TXT")
                    || col.equals("COMMENT_TXT")
                    || col.equals("NONPREG_REL_PROBLM_SCT_CD")) {
                sql.append(" mediumtext");
            } else if (col.indexOf("Date") > -1
                    || col.indexOf("Time") > -1) {
                sql.append(" varchar(10)");
            } else {
                sql.append(" varchar(255)");
            }
        }
    }
    sql.append(");");

    System.out.println(sql.toString());
}
private static void loadBartsData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String onlyThisFileType) {
LOG.debug("Loading Barts data from into " + dbUrl);
try {
//hash file type of every file
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date startDate = sdf.parse("2018-11-01");
//Date startDate = sdf.parse("2018-09-17");
//Date endDate = sdf.parse("2018-09-30");
for (int i=exchanges.size()-1; i>=0; i
Exchange exchange = exchanges.get(i);
String exchangeBody = exchange.getBody();
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
if (files.isEmpty()) {
continue;
}
for (ExchangePayloadFile file: files) {
String type = file.getType();
String path = file.getPath();
//if only doing a specific file type, skip all others
if (onlyThisFileType != null
&& !type.equals(onlyThisFileType)) {
continue;
}
boolean processFile = false;
if (type.equalsIgnoreCase("CVREF")
|| type.equalsIgnoreCase("LOREF")
|| type.equalsIgnoreCase("ORGREF")
|| type.equalsIgnoreCase("PRSNLREF")
|| type.equalsIgnoreCase("NOMREF")) {
processFile = true;
} else {
File f = new File(path);
File parentFile = f.getParentFile();
String parentDir = parentFile.getName();
Date extractDate = sdf.parse(parentDir);
if (!extractDate.before(startDate)) {
processFile = true;
}
/*if (!extractDate.before(startDate)
&& !extractDate.after(endDate)) {
processFile = true;
}*/
}
if (processFile) {
loadBartsDataFromFile(conn, path, type);
}
}
}
conn.close();
LOG.debug("Finished Loading Barts data from into " + dbUrl);
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Loads a single Barts extract file into its corresponding staging table, inserting in
 * small JDBC batches. The table name is derived from the file type and must already exist
 * (see createBartsDataTable); if it doesn't, the file is skipped with an error logged.
 *
 * @param conn     open JDBC connection to the staging database
 * @param filePath path of the source file (also recorded in the file_name column)
 * @param fileType Barts file type, matching a parser class in org.endeavourhealth.transform.barts.schema
 * @throws Exception on any parse or SQL failure (the failing batch's statements are logged first)
 */
private static void loadBartsDataFromFile(Connection conn, String filePath, String fileType) throws Exception {
    LOG.debug("Loading " + fileType + ": " + filePath);

    String fileName = FilenameUtils.getName(filePath);

    //instantiate the parser for this file type via reflection
    ParserI parser = null;
    try {
        String clsName = "org.endeavourhealth.transform.barts.schema." + fileType;
        Class cls = Class.forName(clsName);

        //now construct an instance of the parser for the file we've found
        Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
        parser = constructor.newInstance(null, null, null, null, filePath);

    } catch (ClassNotFoundException cnfe) {
        LOG.error("No parser for file type [" + fileType + "]");
        return;
    }

    String table = fileType.replace(" ", "_");

    //check table is there
    String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1";
    Statement statement = conn.createStatement();
    ResultSet rs = statement.executeQuery(sql);
    boolean tableExists = rs.next();
    rs.close();
    statement.close();

    if (!tableExists) {
        LOG.error("No table exists for " + table);
        return;
    }

    //create insert statement - built with a StringBuilder rather than O(n^2) concatenation
    List<String> cols = parser.getColumnHeaders();
    StringBuilder sb = new StringBuilder();
    sb.append("INSERT INTO `").append(table).append("` (");
    sb.append("file_name");
    for (String col: cols) {
        sb.append(", ");
        sb.append(col.replace(" ", "_").replace("#", "").replace("/", ""));
    }
    sb.append(") VALUES (");
    sb.append("?");
    for (int i=0; i<cols.size(); i++) {
        sb.append(", ?");
    }
    sb.append(")");
    sql = sb.toString();

    PreparedStatement ps = conn.prepareStatement(sql);

    List<String> currentBatchStrs = new ArrayList<>();

    //load table
    try {
        int done = 0;
        int currentBatchSize = 0;

        while (parser.nextRecord()) {

            int col = 1;

            //file name is always first
            ps.setString(col++, fileName);

            for (String colName : cols) {
                CsvCell cell = parser.getCell(colName);
                if (cell == null) {
                    ps.setNull(col++, Types.VARCHAR);
                } else {
                    ps.setString(col++, cell.getString());
                }
            }

            ps.addBatch();
            currentBatchSize++;
            currentBatchStrs.add((ps.toString())); //for error handling

            if (currentBatchSize >= 5) {
                ps.executeBatch();
                currentBatchSize = 0;
                currentBatchStrs.clear();
            }

            done++;
            if (done % 5000 == 0) {
                LOG.debug("Done " + done);
            }
        }

        //flush any part-filled final batch (original tested ">= 0", which always
        //ran - including executing an empty batch when the file size was a multiple of 5)
        if (currentBatchSize > 0) {
            ps.executeBatch();
        }

        ps.close();

    } catch (Throwable t) {
        //dump the statements of the failed batch before re-throwing, to aid diagnosis
        LOG.error("Failed on batch with statements:");
        for (String currentBatchStr: currentBatchStrs) {
            LOG.error(currentBatchStr);
        }
        throw t;
    }

    LOG.debug("Finished " + fileType + ": " + filePath);
}
/**
 * Re-generates pseudo IDs for every patient on a subscriber DB, spreading the work over
 * the given number of worker threads (see FixPseudoIdRunnable for the per-patient work).
 *
 * @param subscriberConfig name of the subscriber DB configuration record
 * @param threads          number of worker threads to run
 */
private static void fixPseudoIds(String subscriberConfig, int threads) {
    LOG.debug("Fixing Pseudo IDs for " + subscriberConfig);
    try {
        //update psuedo ID on patient table
        //update psuedo ID on person table
        //update pseudo ID on subscriber_transform mapping table

        //note: the salt/linked-distributor config parsing previously done here was unused -
        //each FixPseudoIdRunnable parses its own copy, so it has been removed
        JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
        Connection subscriberConnection = EnterpriseFiler.openConnection(config);

        //read every patient ID plus its organisation and person IDs off the subscriber DB
        List<Long> patientIds = new ArrayList<>();
        Map<Long, Long> hmOrgIds = new HashMap<>();
        Map<Long, Long> hmPersonIds = new HashMap<>();

        String sql = "SELECT id, organization_id, person_id FROM patient";
        Statement statement = subscriberConnection.createStatement();
        statement.setFetchSize(10000);
        ResultSet rs = statement.executeQuery(sql);
        while (rs.next()) {
            long patientId = rs.getLong(1);
            long orgId = rs.getLong(2);
            long personId = rs.getLong(3);

            //Long.valueOf rather than the deprecated new Long(..) constructor
            patientIds.add(Long.valueOf(patientId));
            hmOrgIds.put(Long.valueOf(patientId), Long.valueOf(orgId));
            hmPersonIds.put(Long.valueOf(patientId), Long.valueOf(personId));
        }
        rs.close();
        subscriberConnection.close();

        LOG.debug("Found " + patientIds.size() + " patients");

        AtomicInteger done = new AtomicInteger();

        //split the patients into roughly-equal subsets, the last thread taking any remainder
        int pos = 0;
        List<Thread> threadList = new ArrayList<>();
        for (int i=0; i<threads; i++) {
            List<Long> patientSubset = new ArrayList<>();
            int count = patientIds.size() / threads;
            if (i+1 == threads) {
                count = patientIds.size() - pos;
            }
            for (int j=0; j<count; j++) {
                Long patientId = patientIds.get(pos);
                patientSubset.add(patientId);
                pos ++;
            }

            FixPseudoIdRunnable runnable = new FixPseudoIdRunnable(subscriberConfig, patientSubset, hmOrgIds, hmPersonIds, done);
            Thread t = new Thread(runnable);
            t.start();
            threadList.add(t);
        }

        //wait for every worker to finish - join() replaces the original poll-and-sleep loop
        for (Thread t: threadList) {
            t.join();
        }

        LOG.debug("Finished Fixing Pseudo IDs for " + subscriberConfig);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Worker used by fixPseudoIds(): for each enterprise patient ID in its allocated subset,
 * re-generates the pseudo ID from the FHIR Patient resource and writes it back to the
 * subscriber DB (person, patient and link_distributor tables) and to the subscriber
 * transform DB (pseudo_id_map table).
 */
static class FixPseudoIdRunnable implements Runnable {
    private String subscriberConfig = null; //name of the subscriber DB config record
    private List<Long> patientIds = null; //enterprise patient IDs allocated to this worker
    private Map<Long, Long> hmOrgIds = null; //enterprise patient ID -> enterprise organisation ID
    private Map<Long, Long> hmPersonIds = null; //enterprise patient ID -> enterprise person ID
    private AtomicInteger done = null; //progress counter shared between all workers

    public FixPseudoIdRunnable(String subscriberConfig, List<Long> patientIds, Map<Long, Long> hmOrgIds, Map<Long, Long> hmPersonIds, AtomicInteger done) {
        this.subscriberConfig = subscriberConfig;
        this.patientIds = patientIds;
        this.hmOrgIds = hmOrgIds;
        this.hmPersonIds = hmPersonIds;
        this.done = done;
    }

    @Override
    public void run() {
        try {
            doRun();
        } catch (Throwable t) {
            //log and swallow - an uncaught error would otherwise kill the thread silently
            LOG.error("", t);
        }
    }

    /**
     * Processes this worker's patient subset. Opens its own connections to both the
     * subscriber DB and the subscriber transform DB, and commits after each patient.
     */
    private void doRun() throws Exception {
        //open a connection to the subscriber DB
        JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
        Connection subscriberConnection = EnterpriseFiler.openConnection(config);
        Statement statement = subscriberConnection.createStatement();

        //parse the main salt used to generate the primary pseudo ID
        JsonNode saltNode = config.get("pseudonymisation");
        ObjectMapper mapper = new ObjectMapper();
        Object json = mapper.readValue(saltNode.toString(), Object.class);
        String linkDistributors = mapper.writeValueAsString(json);
        LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class);

        //parse any additional salts for linked-distributor pseudo IDs (optional config)
        LinkDistributorConfig[] arr = null;
        JsonNode linkDistributorsNode = config.get("linkedDistributors");
        if (linkDistributorsNode != null) {
            json = mapper.readValue(linkDistributorsNode.toString(), Object.class);
            linkDistributors = mapper.writeValueAsString(json);
            arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class);
        }

        //PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfig);
        ResourceDalI resourceDal = DalProvider.factoryResourceDal();

        //open a connection to the subscriber transform (mapping) DB
        EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig);
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection subscriberTransformConnection = session.connection();
        Statement subscriberTransformStatement = subscriberTransformConnection.createStatement();

        String sql = null;
        ResultSet rs = null;

        //NOTE(review): all SQL below is built by string concatenation. The IDs are numeric
        //and the pseudo IDs are generated hashes, but PreparedStatements would be safer - confirm
        for (Long patientId: patientIds) {
            Long orgId = hmOrgIds.get(patientId);
            Long personId = hmPersonIds.get(patientId);

            //find service ID via the org's enterprise->service mapping
            sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId;
            rs = subscriberTransformStatement.executeQuery(sql);
            if (!rs.next()) {
                throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId);
            }
            String serviceId = rs.getString(1);
            rs.close();

            //find patient ID (i.e. map the enterprise ID back to the FHIR resource)
            sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId;
            rs = subscriberTransformStatement.executeQuery(sql);
            if (!rs.next()) {
                throw new Exception("Failed to find resource iD for patient ID " + patientId);
            }
            String resourceType = rs.getString(1);
            String resourceId = rs.getString(2);
            rs.close();

            //sanity check - the enterprise ID must map to a Patient resource
            if (!resourceType.equals("Patient")) {
                throw new Exception("Not a patient resource type for enterprise ID " + patientId);
            }

            //get patient resource (current version) from the resource store
            Resource resource = null;
            try {
                resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.Patient, resourceId);
            } catch (Exception ex) {
                throw new Exception("Failed to get patient " + resourceId + " for service " + serviceId, ex);
            }
            if (resource == null) {
                //a missing resource is logged and skipped rather than aborting the whole run
                LOG.error("Failed to find patient resource for " + ResourceType.Patient + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
                continue;
                //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
            }
            Patient patient = (Patient)resource;

            //generate new pseudo ID from the patient and the main salt
            String pseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, salt);

            //save to person (null and non-null cases use different UPDATE statements)
            if (Strings.isNullOrEmpty(pseudoId)) {
                sql = "UPDATE person"
                        + " SET pseudo_id = null"
                        + " WHERE id = " + personId;
                statement.executeUpdate(sql);
            } else {
                sql = "UPDATE person"
                        + " SET pseudo_id = '" + pseudoId + "'"
                        + " WHERE id = " + personId;
                statement.executeUpdate(sql);
            }

            //save to patient
            if (Strings.isNullOrEmpty(pseudoId)) {
                sql = "UPDATE patient"
                        + " SET pseudo_id = null"
                        + " WHERE id = " + patientId;
                statement.executeUpdate(sql);
            } else {
                sql = "UPDATE patient"
                        + " SET pseudo_id = '" + pseudoId + "'"
                        + " WHERE id = " + patientId;
                statement.executeUpdate(sql);
            }

            //linked distributors - upsert one link_distributor row per additional salt
            if (arr != null) {
                for (LinkDistributorConfig linked: arr) {
                    String linkedPseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, linked);
                    sql = "INSERT INTO link_distributor (source_skid, target_salt_key_name, target_skid) VALUES ('" + pseudoId + "', '" + linked.getSaltKeyName() + "', '" + linkedPseudoId + "')"
                            + " ON DUPLICATE KEY UPDATE"
                            + " target_salt_key_name = VALUES(target_salt_key_name),"
                            + " target_skid = VALUES(target_skid)";
                    statement.executeUpdate(sql);
                }
            }

            //save to subscriber transform - delete-then-insert so a now-empty pseudo ID leaves no row
            sql = "DELETE FROM pseudo_id_map WHERE patient_id = '" + resourceId + "'";
            subscriberTransformStatement.executeUpdate(sql);

            if (!Strings.isNullOrEmpty(pseudoId)) {
                sql = "INSERT INTO pseudo_id_map (patient_id, pseudo_id) VALUES ('" + resourceId + "', '" + pseudoId + "')";
                subscriberTransformStatement.executeUpdate(sql);
            }

            //commit both DBs after each patient
            subscriberConnection.commit();
            subscriberTransformConnection.commit();

            int doneLocal = done.incrementAndGet();
            if (doneLocal % 1000 == 0) {
                LOG.debug("Done " + doneLocal);
            }
        }

        statement.close();
        subscriberTransformStatement.close();
        subscriberConnection.close();
        subscriberTransformConnection.close();
    }
}
/**
 * Recalculates the age columns for every deceased patient on the given subscriber DB.
 * For each patient with a date_of_death set, the source FHIR Patient is looked up via the
 * enterprise ID mapping tables, the age values are recalculated from DOB/DOD, and the
 * subscriber patient and person tables are updated.
 *
 * @param subscriberConfig name of the subscriber DB configuration to process
 */
private static void fixDeceasedPatients(String subscriberConfig) {
    LOG.debug("Fixing Deceased Patients for " + subscriberConfig);
    try {
        JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
        Connection subscriberConnection = EnterpriseFiler.openConnection(config);

        //find every deceased patient and the org it belongs to
        Map<Long, Long> patientIds = new HashMap<>();
        String sql = "SELECT id, organization_id FROM patient WHERE date_of_death IS NOT NULL";
        Statement statement = subscriberConnection.createStatement();
        ResultSet rs = statement.executeQuery(sql);
        while (rs.next()) {
            long patientId = rs.getLong(1);
            long orgId = rs.getLong(2);
            patientIds.put(patientId, orgId); //auto-boxing instead of deprecated new Long(..)
        }
        rs.close();
        statement.close();

        EnterpriseAgeUpdaterlDalI dal = DalProvider.factoryEnterpriseAgeUpdaterlDal(subscriberConfig);
        EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig);
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection subscriberTransformConnection = session.connection();
        ResourceDalI resourceDal = DalProvider.factoryResourceDal();

        for (Long patientId : patientIds.keySet()) {
            Long orgId = patientIds.get(patientId);

            //map the enterprise org ID back to the publisher service it came from
            statement = subscriberTransformConnection.createStatement();
            sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId;
            rs = statement.executeQuery(sql);
            if (!rs.next()) {
                throw new Exception("Failed to find service ID for patient ID " + patientId + " and org ID " + orgId);
            }
            String serviceId = rs.getString(1);
            rs.close();

            //map the enterprise patient ID back to the source FHIR resource
            sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId;
            rs = statement.executeQuery(sql);
            if (!rs.next()) {
                throw new Exception("Failed to find resource ID for patient ID " + patientId);
            }
            String resourceType = rs.getString(1);
            String resourceId = rs.getString(2);
            rs.close();
            statement.close();

            Resource resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.valueOf(resourceType), resourceId);
            if (resource == null) {
                //the resource may have been deleted since the subscriber row was written, so skip rather than abort the whole run
                LOG.error("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
                continue;
            }
            Patient patient = (Patient) resource;

            Date dob = patient.getBirthDate();
            //NOTE(review): assumes deceased is recorded as a dateTime, not a boolean — confirm the upstream transform guarantees this
            Date dod = patient.getDeceasedDateTimeType().getValue();

            Integer[] ages = dal.calculateAgeValuesAndUpdateTable(patientId, dob, dod);
            updateEnterprisePatient(patientId, ages, subscriberConnection);
            updateEnterprisePerson(patientId, ages, subscriberConnection);
        }

        subscriberConnection.close();
        subscriberTransformConnection.close();
        entityManager.close(); //was previously leaked
        LOG.debug("Finished Fixing Deceased Patients for " + subscriberConfig);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Writes recalculated age columns to the subscriber patient table.
 * The enterprise patient DB isn't managed using hibernate, so this is a raw JDBC update.
 *
 * @param enterprisePatientId ID of the row in the subscriber patient table
 * @param ages                values indexed by the EnterpriseAge.UNIT_* constants; null entries become SQL NULL
 * @param connection          open subscriber DB connection; the update is committed here
 * @throws Exception on any database error
 */
private static void updateEnterprisePatient(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception {
    String sql = "UPDATE patient SET "
            + "age_years = ?, "
            + "age_months = ?, "
            + "age_weeks = ? "
            + "WHERE id = ?";

    //try-with-resources so the statement is always closed (it was previously leaked)
    try (PreparedStatement update = connection.prepareStatement(sql)) {
        if (ages[EnterpriseAge.UNIT_YEARS] == null) {
            update.setNull(1, Types.INTEGER);
        } else {
            update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]);
        }
        if (ages[EnterpriseAge.UNIT_MONTHS] == null) {
            update.setNull(2, Types.INTEGER);
        } else {
            update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]);
        }
        if (ages[EnterpriseAge.UNIT_WEEKS] == null) {
            update.setNull(3, Types.INTEGER);
        } else {
            update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]);
        }
        update.setLong(4, enterprisePatientId);

        //only a single row is updated, so a plain executeUpdate beats the previous one-element batch
        update.executeUpdate();
    }
    connection.commit();

    LOG.info("Updated patient " + enterprisePatientId + " to ages " + ages[EnterpriseAge.UNIT_YEARS] + " y, " + ages[EnterpriseAge.UNIT_MONTHS] + " m " + ages[EnterpriseAge.UNIT_WEEKS] + " wks");
}
/**
 * Writes recalculated age columns to the subscriber person table, for the person row
 * linked to the given patient where the pseudo IDs also match.
 *
 * @param enterprisePatientId ID of the row in the subscriber patient table
 * @param ages                values indexed by the EnterpriseAge.UNIT_* constants; null entries become SQL NULL
 * @param connection          open subscriber DB connection; the update is committed here
 * @throws Exception on any database error
 */
private static void updateEnterprisePerson(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception {
    //join patient->person so we only touch the person row for our patient with a matching pseudo ID
    String sql = "UPDATE patient, person SET "
            + "person.age_years = ?, "
            + "person.age_months = ?, "
            + "person.age_weeks = ? "
            + "WHERE patient.id = ? "
            + "AND patient.person_id = person.id "
            + "AND patient.pseudo_id = person.pseudo_id";

    //try-with-resources so the statement is always closed (it was previously leaked)
    try (PreparedStatement update = connection.prepareStatement(sql)) {
        if (ages[EnterpriseAge.UNIT_YEARS] == null) {
            update.setNull(1, Types.INTEGER);
        } else {
            update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]);
        }
        if (ages[EnterpriseAge.UNIT_MONTHS] == null) {
            update.setNull(2, Types.INTEGER);
        } else {
            update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]);
        }
        if (ages[EnterpriseAge.UNIT_WEEKS] == null) {
            update.setNull(3, Types.INTEGER);
        } else {
            update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]);
        }
        update.setLong(4, enterprisePatientId);

        //only a single logical update, so a plain executeUpdate beats the previous one-element batch
        update.executeUpdate();
    }
    connection.commit();
}
/**
 * Timing test for a ranged S3 read: fetches [start, start+len) of the given object,
 * reads it as text and logs the content plus elapsed milliseconds.
 *
 * @param s3BucketName bucket to read from
 * @param keyName      object key
 * @param start        byte offset to start at (parsed as long)
 * @param len          number of bytes to request (parsed as long)
 */
private static void testS3Read(String s3BucketName, String keyName, String start, String len) {
    LOG.debug("Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes");
    try {
        AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder
                .standard()
                .withCredentials(DefaultAWSCredentialsProviderChain.getInstance())
                .withRegion(Regions.EU_WEST_2);
        AmazonS3 s3Client = clientBuilder.build();

        GetObjectRequest request = new GetObjectRequest(s3BucketName, keyName);
        long startInt = Long.parseLong(start);
        long lenInt = Long.parseLong(len);
        long endInt = startInt + lenInt;
        request.setRange(startInt, endInt);

        long startMs = System.currentTimeMillis();
        S3Object object = s3Client.getObject(request);
        StringBuilder sb = new StringBuilder();

        //try-with-resources so stream and reader are closed even if the read fails (previously leaked on exception)
        try (InputStream inputStream = object.getObjectContent();
             InputStreamReader reader = new InputStreamReader(inputStream, Charset.defaultCharset())) {

            char[] buf = new char[100];
            while (true) {
                int read = reader.read(buf);
                //NOTE(review): the limit compares CHARS read against a BYTE length — close enough for a smoke test, but not exact for multi-byte encodings
                if (read == -1
                        || sb.length() >= lenInt) {
                    break;
                }
                sb.append(buf, 0, read);
            }
        }

        long endMs = System.currentTimeMillis();
        LOG.debug("Read " + sb.toString() + " in " + (endMs - startMs) + " ms");
        LOG.debug("Finished Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes");
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Builds a human-readable "transform map" for a service: for every FHIR resource version
 * listed in the given mapping table, works out which source-file columns each JSON field
 * was populated from, then writes the summary to outputFile.
 * (The method name typo is retained so existing callers keep working.)
 *
 * @param serviceId  publisher service to read mappings for
 * @param table      table holding (resource_type, resource_id, version) rows to process
 * @param outputFile path the text summary is written to (created/truncated)
 */
private static void createTransforMap(UUID serviceId, String table, String outputFile) {
    LOG.debug("Creating transform map for " + serviceId + " from " + table);
    try {
        //connections to the publisher transform (mapping) DB and the EHR DB
        EntityManager transformEntityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId);
        SessionImpl session2 = (SessionImpl) transformEntityManager.getDelegate();
        Connection mappingConnection = session2.connection();

        EntityManager ehrEntityManager = ConnectionManager.getEhrEntityManager(serviceId);
        SessionImpl session3 = (SessionImpl) ehrEntityManager.getDelegate();
        Connection ehrConnection = session3.connection();

        String sql = "SELECT resource_type, resource_id, version FROM " + table;
        Statement statement = mappingConnection.createStatement();
        statement.setFetchSize(1000); //stream results rather than loading them all
        ResultSet rs = statement.executeQuery(sql);
        LOG.debug("Got resource IDs from DB");

        //resource type -> JSON field -> distinct "fileType:column" mappings
        Map<String, Map<String, List<String>>> hm = new HashMap<>();
        int count = 0;

        while (rs.next()) {
            String resourceType = rs.getString("resource_type");
            String resourceId = rs.getString("resource_id");
            String resourceVersion = rs.getString("version");

            //bind parameters rather than concatenating values into the SQL; the previous
            //code also called executeQuery(String) on a PreparedStatement, which the
            //JDBC spec forbids
            sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?";
            PreparedStatement statement1 = mappingConnection.prepareStatement(sql);
            statement1.setString(1, resourceType);
            statement1.setString(2, resourceId);
            statement1.setString(3, resourceVersion);
            ResultSet rs1 = statement1.executeQuery();
            if (!rs1.next()) {
                throw new Exception("No resource_field_mappings row for " + resourceType + " " + resourceId + " version " + resourceVersion);
            }
            String jsonStr = rs1.getString("mappings_json");
            rs1.close();
            statement1.close();

            //look up the stored resource JSON for the same version
            sql = "SELECT * FROM resource_history WHERE resource_type = ? AND resource_id = ? AND version = ?";
            statement1 = ehrConnection.prepareStatement(sql);
            statement1.setString(1, resourceType);
            statement1.setString(2, resourceId);
            statement1.setString(3, resourceVersion);
            rs1 = statement1.executeQuery();
            if (!rs1.next()) {
                throw new Exception("Failed to find resource_history for " + statement1.toString());
            }
            String s = rs1.getString("resource_data");
            rs1.close();
            statement1.close();

            //deleted resource versions have no data, so nothing to map
            if (Strings.isNullOrEmpty(s)) {
                continue;
            }
            JsonNode resourceJson = ObjectMapperPool.getInstance().readTree(s);

            Map<String, List<String>> hmResourceType = hm.get(resourceType);
            if (hmResourceType == null) {
                hmResourceType = new HashMap<>();
                hm.put(resourceType, hmResourceType);
            }

            JsonNode json = ObjectMapperPool.getInstance().readTree(jsonStr);
            for (int i = 0; i < json.size(); i++) {
                JsonNode child = json.get(i);
                JsonNode idNode = child.get("auditId");
                JsonNode colsNode = child.get("cols");
                if (idNode == null) {
                    throw new Exception("No ID node in " + jsonStr);
                }
                if (colsNode == null) {
                    throw new Exception("No cols node in " + jsonStr);
                }
                long id = idNode.asLong();

                //walk audit ID -> source file record -> source file -> file type description
                sql = "SELECT * FROM source_file_record WHERE id = ?";
                statement1 = mappingConnection.prepareStatement(sql);
                statement1.setLong(1, id);
                rs1 = statement1.executeQuery();
                if (!rs1.next()) {
                    throw new Exception("No source_file_record row for ID " + id);
                }
                long sourceFileId = rs1.getLong("source_file_id");
                rs1.close();
                statement1.close();

                sql = "SELECT * FROM source_file WHERE id = ?";
                statement1 = mappingConnection.prepareStatement(sql);
                statement1.setLong(1, sourceFileId);
                rs1 = statement1.executeQuery();
                if (!rs1.next()) {
                    throw new Exception("No source_file row for ID " + sourceFileId);
                }
                long sourceFileType = rs1.getLong("source_file_type_id");
                rs1.close();
                statement1.close();

                sql = "SELECT * FROM source_file_type WHERE id = ?";
                statement1 = mappingConnection.prepareStatement(sql);
                statement1.setLong(1, sourceFileType);
                rs1 = statement1.executeQuery();
                if (!rs1.next()) {
                    throw new Exception("No source_file_type row for ID " + sourceFileType);
                }
                String fileTypeDesc = rs1.getString("description");
                rs1.close();
                statement1.close();

                //column index -> column name for this file type
                Map<Integer, String> hmCols = new HashMap<>();
                sql = "SELECT * FROM source_file_type_column WHERE source_file_type_id = ?";
                statement1 = mappingConnection.prepareStatement(sql);
                statement1.setLong(1, sourceFileType);
                rs1 = statement1.executeQuery();
                while (rs1.next()) {
                    int index = rs1.getInt("column_index");
                    String name = rs1.getString("column_name");
                    hmCols.put(index, name); //auto-boxing instead of deprecated new Integer(..)
                }
                rs1.close();
                statement1.close();

                for (int j = 0; j < colsNode.size(); j++) {
                    JsonNode colNode = colsNode.get(j);
                    int col = colNode.get("col").asInt();
                    String jsonField = colNode.get("field").asText();

                    //make array indexes in the JSON path generic ("[n]"), except extension
                    //indexes, which are replaced with the shortened extension URL
                    int index = jsonField.indexOf("[");
                    while (index > -1) {
                        int endIndex = jsonField.indexOf("]", index);
                        String prefix = jsonField.substring(0, index + 1);
                        String suffix = jsonField.substring(endIndex);
                        if (prefix.equals("extension[")) {
                            String val = jsonField.substring(index + 1, endIndex);
                            int extensionIndex = Integer.parseInt(val);
                            JsonNode extensionArray = resourceJson.get("extension");
                            JsonNode extensionRoot = extensionArray.get(extensionIndex);
                            String extensionUrl = extensionRoot.get("url").asText();
                            extensionUrl = extensionUrl.replace("http://endeavourhealth.org/fhir/StructureDefinition/", "");
                            extensionUrl = extensionUrl.replace("http://hl7.org/fhir/StructureDefinition/", "");
                            jsonField = prefix + extensionUrl + suffix;
                        } else {
                            jsonField = prefix + "n" + suffix;
                        }
                        index = jsonField.indexOf("[", endIndex);
                    }

                    String colName = hmCols.get(col);
                    String fileTypeAndCol = fileTypeDesc + ":" + colName;

                    List<String> fieldNameMappings = hmResourceType.get(jsonField);
                    if (fieldNameMappings == null) {
                        fieldNameMappings = new ArrayList<>();
                        hmResourceType.put(jsonField, fieldNameMappings);
                    }
                    if (!fieldNameMappings.contains(fileTypeAndCol)) {
                        fieldNameMappings.add(fileTypeAndCol);
                    }
                }
            }

            count++;
            if (count % 500 == 0) {
                LOG.debug("Done " + count);
            }
        }
        LOG.debug("Done " + count);
        rs.close();
        statement.close(); //was previously left open
        ehrEntityManager.close();
        transformEntityManager.close(); //was previously leaked

        //write the summary file, resource types and fields in case-insensitive order
        List<String> lines = new ArrayList<>();
        List<String> resourceTypes = new ArrayList<>(hm.keySet());
        Collections.sort(resourceTypes, String.CASE_INSENSITIVE_ORDER);
        for (String resourceType : resourceTypes) {
            lines.add("============================================================");
            lines.add(resourceType);
            lines.add("============================================================");

            Map<String, List<String>> hmResourceType = hm.get(resourceType);
            List<String> fields = new ArrayList<>(hmResourceType.keySet());
            Collections.sort(fields, String.CASE_INSENSITIVE_ORDER);
            for (String field : fields) {
                String linePrefix = field + " = ";
                List<String> sourceRecords = hmResourceType.get(field);
                for (String sourceRecord : sourceRecords) {
                    lines.add(linePrefix + sourceRecord);
                    linePrefix = Strings.repeat(" ", linePrefix.length()); //align continuation lines under the first
                }
                lines.add("");
            }
            lines.add("");
        }

        File f = new File(outputFile);
        Path p = f.toPath();
        Files.write(p, lines, Charset.defaultCharset(), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING);
        LOG.debug("Finished creating transform map from " + table);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Strips stale duplicate telecoms, names and addresses from Barts patient resources.
 * Entries carrying an element ID came from the source CSV, so any matching entry
 * WITHOUT an ID is an old duplicate and is removed via the relevant transformer.
 *
 * @param serviceId service whose patients should be checked
 */
private static void fixBartsPatients(UUID serviceId) {
    LOG.debug("Fixing Barts patients at service " + serviceId);
    try {
        EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager();
        SessionImpl session = (SessionImpl) edsEntityManager.getDelegate();
        Connection edsConnection = session.connection();

        int checked = 0;
        int fixed = 0;

        ResourceDalI resourceDal = DalProvider.factoryResourceDal();

        String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId + "';";
        Statement s = edsConnection.createStatement();
        s.setFetchSize(10000); //don't get all rows at once
        ResultSet rs = s.executeQuery(sql);
        LOG.info("Got raw results back");
        while (rs.next()) {
            String patientId = rs.getString(1);

            ResourceWrapper wrapper = resourceDal.getCurrentVersion(serviceId, ResourceType.Patient.toString(), UUID.fromString(patientId));
            if (wrapper == null) {
                LOG.error("Failed to get resource current for ID " + patientId);
                continue;
            }
            String oldJson = wrapper.getResourceData();
            Patient patient = (Patient) FhirSerializationHelper.deserializeResource(oldJson);
            PatientBuilder patientBuilder = new PatientBuilder(patient);

            //telecoms with an ID came from the CSV; remove matching ones without an ID
            List<String> numbersFromCsv = new ArrayList<>();
            if (patient.hasTelecom()) {
                for (ContactPoint contactPoint : patient.getTelecom()) {
                    if (contactPoint.hasId()) {
                        numbersFromCsv.add(contactPoint.getValue());
                    }
                }
                for (String numberFromCsv : numbersFromCsv) {
                    PPPHOTransformer.removeExistingContactPointWithoutIdByValue(patientBuilder, numberFromCsv);
                }
            }

            //same for names
            List<HumanName> namesFromCsv = new ArrayList<>();
            if (patient.hasName()) {
                for (HumanName name : patient.getName()) {
                    if (name.hasId()) {
                        namesFromCsv.add(name);
                    }
                }
                for (HumanName name : namesFromCsv) {
                    PPNAMTransformer.removeExistingNameWithoutIdByValue(patientBuilder, name);
                }
            }

            //and for addresses
            List<Address> addressesFromCsv = new ArrayList<>();
            if (patient.hasAddress()) {
                for (Address address : patient.getAddress()) {
                    if (address.hasId()) {
                        addressesFromCsv.add(address);
                    }
                }
                for (Address address : addressesFromCsv) {
                    PPADDTransformer.removeExistingAddressWithoutIdByValue(patientBuilder, address);
                }
            }

            //only write back if the resource actually changed
            String newJson = FhirSerializationHelper.serializeResource(patient);
            if (!newJson.equals(oldJson)) {
                wrapper.setResourceData(newJson);
                saveResourceWrapper(serviceId, wrapper);
                fixed++;
            }

            checked++;
            if (checked % 1000 == 0) {
                LOG.debug("Checked " + checked + " fixed " + fixed);
            }
        }
        LOG.debug("Checked " + checked + " fixed " + fixed);
        rs.close();
        s.close();
        edsEntityManager.close();
        LOG.debug("Finish Fixing Barts patients at service " + serviceId);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Posts a file of exchange UUIDs (one per line) to the given Rabbit exchange, one at a
 * time. Progress is recorded in a sibling "DONE&lt;name&gt;" file so a re-run resumes where
 * the previous run stopped. Lines that don't parse as UUIDs are skipped with an error.
 *
 * @param exchangeName Rabbit exchange to post to
 * @param srcFile      file of exchange UUIDs, one per line
 * @param throttle     optional cap in messages/second; null means unthrottled
 */
private static void postToRabbit(String exchangeName, String srcFile, Integer throttle) {
    LOG.info("Posting to " + exchangeName + " from " + srcFile);
    if (throttle != null) {
        LOG.info("Throttled to " + throttle + " messages/second");
    }
    try {
        List<UUID> exchangeIds = new ArrayList<>();
        File src = new File(srcFile);
        List<String> lines = Files.readAllLines(src.toPath());
        for (String line : lines) {
            if (!Strings.isNullOrEmpty(line)) {
                try {
                    UUID uuid = UUID.fromString(line);
                    exchangeIds.add(uuid);
                } catch (Exception ex) {
                    LOG.error("Skipping line " + line);
                }
            }
        }
        LOG.info("Found " + exchangeIds.size() + " to post to " + exchangeName);

        //the DONE file alongside the source records IDs already posted, so re-runs resume
        File dir = src.getParentFile();
        String name = "DONE" + src.getName();
        File dst = new File(dir, name);
        if (dst.exists()) {
            lines = Files.readAllLines(dst.toPath());
            for (String line : lines) {
                if (!Strings.isNullOrEmpty(line)) {
                    try {
                        UUID uuid = UUID.fromString(line);
                        exchangeIds.remove(uuid);
                    } catch (Exception ex) {
                        LOG.error("Skipping line " + line);
                    }
                }
            }
            LOG.info("After removing done ones, now have " + exchangeIds.size());
        }

        continueOrQuit();

        //append mode: the old code truncated the DONE file, losing earlier runs' progress,
        //so a third run would have re-posted IDs done in the first.
        //try-with-resources so the file is closed even if posting fails part-way
        try (PrintWriter printWriter = new PrintWriter(new FileWriter(dst, true))) {
            long startMs = System.currentTimeMillis();
            int doneThisSecond = 0;
            LOG.info("Posting " + exchangeIds.size() + " to " + exchangeName);

            for (int i = 0; i < exchangeIds.size(); i++) {
                UUID exchangeId = exchangeIds.get(i);
                List<UUID> tmp = new ArrayList<>();
                tmp.add(exchangeId);
                QueueHelper.postToExchange(tmp, exchangeName, null, true);

                printWriter.println(exchangeId.toString());
                printWriter.flush(); //flush per message so a crash loses no progress

                if (i % 5000 == 0) {
                    LOG.debug("Done " + i + " / " + exchangeIds.size());
                }

                if (throttle != null) {
                    doneThisSecond++;
                    if (doneThisSecond > throttle.intValue()) {
                        long now = System.currentTimeMillis();
                        long sleep = 1000 - (now - startMs);
                        if (sleep > 0) {
                            Thread.sleep(sleep);
                        }
                        //restart the window AFTER any sleep; the old code used the
                        //pre-sleep timestamp, silently shortening the next window
                        startMs = System.currentTimeMillis();
                        doneThisSecond = 0;
                    }
                }
            }
        }
        LOG.info("Finished Posting to " + exchangeName + " from " + srcFile);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/*private static void postToProtocol(String srcFile) {
LOG.info("Posting to protocol from " + srcFile);
try {
List<UUID> exchangeIds = new ArrayList<>();
List<String> lines = Files.readAllLines(new File(srcFile).toPath());
for (String line: lines) {
if (!Strings.isNullOrEmpty(line)) {
UUID uuid = UUID.fromString(line);
exchangeIds.add(uuid);
}
}
LOG.info("Posting " + exchangeIds.size() + " to Protocol queue");
QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, false);
LOG.info("Finished Posting to protocol from " + srcFile);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * Populates the patient_uprn table on the given subscriber database from the
 * patient_address_uprn staging table in the eds database. If the subscriber is
 * pseudonymised, the UPRN is salted-and-hashed rather than stored in the clear.
 * Rows for patients not present on this subscriber are skipped.
 *
 * @param subscriberConfigName name of the subscriber configuration to populate
 * @throws Exception declared for callers, though all failures are caught and logged here
 */
private static void populateSubscriberUprnTable(String subscriberConfigName) throws Exception {
    LOG.info("Populating Subscriber UPRN Table for " + subscriberConfigName);
    try {
        JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber");

        //the presence of a "pseudonymisation" node tells us whether this subscriber is de-identified
        JsonNode pseudoNode = config.get("pseudonymisation");
        boolean pseudonymised = pseudoNode != null;
        byte[] saltBytes = null;
        if (pseudonymised) {
            JsonNode saltNode = pseudoNode.get("salt");
            String base64Salt = saltNode.asText();
            saltBytes = Base64.getDecoder().decode(base64Salt);
        }

        Connection subscriberConnection = EnterpriseFiler.openConnection(config);

        //pseudonymised DBs store a hashed UPRN in pseudo_uprn; clear DBs store the raw uprn
        String upsertSql;
        if (pseudonymised) {
            upsertSql = "INSERT INTO patient_uprn"
                    + " (patient_id, organization_id, person_id, lsoa_code, pseudo_uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)"
                    + " VALUES"
                    + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
                    + " ON DUPLICATE KEY UPDATE"
                    + " organization_id = VALUES(organization_id),"
                    + " person_id = VALUES(person_id),"
                    + " lsoa_code = VALUES(lsoa_code),"
                    + " pseudo_uprn = VALUES(pseudo_uprn),"
                    + " qualifier = VALUES(qualifier),"
                    + " `algorithm` = VALUES(`algorithm`),"
                    + " `match` = VALUES(`match`),"
                    + " no_address = VALUES(no_address),"
                    + " invalid_address = VALUES(invalid_address),"
                    + " missing_postcode = VALUES(missing_postcode),"
                    + " invalid_postcode = VALUES(invalid_postcode)";
        } else {
            upsertSql = "INSERT INTO patient_uprn"
                    + " (patient_id, organization_id, person_id, lsoa_code, uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)"
                    + " VALUES"
                    + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
                    + " ON DUPLICATE KEY UPDATE"
                    + " organization_id = VALUES(organization_id),"
                    + " person_id = VALUES(person_id),"
                    + " lsoa_code = VALUES(lsoa_code),"
                    + " uprn = VALUES(uprn),"
                    + " qualifier = VALUES(qualifier),"
                    + " `algorithm` = VALUES(`algorithm`),"
                    + " `match` = VALUES(`match`),"
                    + " no_address = VALUES(no_address),"
                    + " invalid_address = VALUES(invalid_address),"
                    + " missing_postcode = VALUES(missing_postcode),"
                    + " invalid_postcode = VALUES(invalid_postcode)";
        }

        PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql);
        int inBatch = 0;

        EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager();
        SessionImpl session = (SessionImpl) edsEntityManager.getDelegate();
        Connection edsConnection = session.connection();

        EnterpriseIdDalI enterpriseIdDal = DalProvider.factoryEnterpriseIdDal(subscriberConfigName);
        PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal();
        PostcodeDalI postcodeDal = DalProvider.factoryPostcodeDal();

        int checked = 0;
        int saved = 0;

        String sql = "SELECT service_id, patient_id, uprn, qualifier, abp_address, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode FROM patient_address_uprn";
        Statement s = edsConnection.createStatement();
        s.setFetchSize(10000); //don't get all rows at once
        ResultSet rs = s.executeQuery(sql);
        LOG.info("Got raw results back");
        while (rs.next()) {
            int col = 1;
            String serviceId = rs.getString(col++);
            String patientId = rs.getString(col++);
            Long uprn = rs.getLong(col++);
            if (rs.wasNull()) {
                uprn = null;
            }
            String qualifier = rs.getString(col++);
            String abpAddress = rs.getString(col++);
            String algorithm = rs.getString(col++);
            String match = rs.getString(col++);
            boolean noAddress = rs.getBoolean(col++);
            boolean invalidAddress = rs.getBoolean(col++);
            boolean missingPostcode = rs.getBoolean(col++);
            boolean invalidPostcode = rs.getBoolean(col++);

            //if the patient doesn't exist on this subscriber DB, don't transform this record
            Long subscriberPatientId = enterpriseIdDal.findEnterpriseId(ResourceType.Patient.toString(), patientId);
            if (subscriberPatientId != null) {
                Long subscriberOrgId = enterpriseIdDal.findEnterpriseOrganisationId(serviceId);
                String discoveryPersonId = patientLinkDal.getPersonId(patientId);
                Long subscriberPersonId = enterpriseIdDal.findOrCreateEnterprisePersonId(discoveryPersonId);

                //derive the LSOA code from the last token of the ABP address (the postcode)
                String lsoaCode = null;
                if (!Strings.isNullOrEmpty(abpAddress)) {
                    String[] toks = abpAddress.split(" ");
                    String postcode = toks[toks.length - 1];
                    PostcodeLookup postcodeReference = postcodeDal.getPostcodeReference(postcode);
                    if (postcodeReference != null) {
                        lsoaCode = postcodeReference.getLsoaCode();
                    }
                }

                col = 1;
                psUpsert.setLong(col++, subscriberPatientId);
                psUpsert.setLong(col++, subscriberOrgId);
                psUpsert.setLong(col++, subscriberPersonId);
                psUpsert.setString(col++, lsoaCode);
                if (pseudonymised) {
                    //hash the UPRN with the subscriber's salt rather than storing it in the clear
                    String pseudoUprn = null;
                    if (uprn != null) {
                        TreeMap<String, String> keys = new TreeMap<>();
                        keys.put("UPRN", "" + uprn);
                        Crypto crypto = new Crypto();
                        crypto.SetEncryptedSalt(saltBytes);
                        pseudoUprn = crypto.GetDigest(keys);
                    }
                    psUpsert.setString(col++, pseudoUprn);
                } else {
                    if (uprn != null) {
                        psUpsert.setLong(col++, uprn.longValue());
                    } else {
                        psUpsert.setNull(col++, Types.BIGINT);
                    }
                }
                psUpsert.setString(col++, qualifier);
                psUpsert.setString(col++, algorithm);
                psUpsert.setString(col++, match);
                psUpsert.setBoolean(col++, noAddress);
                psUpsert.setBoolean(col++, invalidAddress);
                psUpsert.setBoolean(col++, missingPostcode);
                psUpsert.setBoolean(col++, invalidPostcode);

                psUpsert.addBatch();
                inBatch++;
                saved++;
                if (inBatch >= TransformConfig.instance().getResourceSaveBatchSize()) {
                    psUpsert.executeBatch();
                    subscriberConnection.commit();
                    inBatch = 0;
                }
            }
            checked++;
            if (checked % 1000 == 0) {
                LOG.info("Checked " + checked + " Saved " + saved);
            }
        }

        //flush any remaining batched upserts
        if (inBatch > 0) {
            psUpsert.executeBatch();
            subscriberConnection.commit();
        }
        LOG.info("Checked " + checked + " Saved " + saved);
        rs.close();
        s.close(); //was previously left open
        psUpsert.close();
        subscriberConnection.close();
        edsEntityManager.close();
        LOG.info("Finished Populating Subscriber UPRN Table for " + subscriberConfigName);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Repairs person matching for patients with a missing or invalid (wrong length) NHS
 * number. If such a patient was matched to a person via an NHS number, the link is
 * broken by assigning a fresh person ID (with a history record); if the patient_search
 * row itself holds a bad NHS number, the search record is rebuilt from the current
 * FHIR Patient.
 */
private static void fixPersonsNoNhsNumber() {
    LOG.info("Fixing persons with no NHS number");
    try {
        ServiceDalI serviceDal = DalProvider.factoryServiceDal();
        List<Service> services = serviceDal.getAll();

        EntityManager entityManager = ConnectionManager.getEdsEntityManager();
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection patientSearchConnection = session.connection();
        Statement patientSearchStatement = patientSearchConnection.createStatement();

        for (Service service : services) {
            LOG.info("Doing " + service.getName() + " " + service.getId());
            int checked = 0;
            int fixedPersons = 0;
            int fixedSearches = 0;

            //find patients at this service with a missing or wrong-length NHS number
            String sql = "SELECT patient_id, nhs_number FROM patient_search WHERE service_id = '" + service.getId() + "' AND (nhs_number IS NULL or CHAR_LENGTH(nhs_number) != 10)";
            ResultSet rs = patientSearchStatement.executeQuery(sql);
            while (rs.next()) {
                String patientId = rs.getString(1);
                String nhsNumber = rs.getString(2);

                //find matched person ID
                String personIdSql = "SELECT person_id FROM patient_link WHERE patient_id = '" + patientId + "'";
                Statement s = patientSearchConnection.createStatement();
                ResultSet rsPersonId = s.executeQuery(personIdSql);
                String personId = null;
                if (rsPersonId.next()) {
                    personId = rsPersonId.getString(1);
                }
                rsPersonId.close();
                s.close();
                if (Strings.isNullOrEmpty(personId)) {
                    LOG.error("Patient " + patientId + " has no person ID");
                    continue;
                }

                //see whether the person was matched using an NHS number
                String patientLinkSql = "SELECT nhs_number FROM patient_link_person WHERE person_id = '" + personId + "'";
                s = patientSearchConnection.createStatement();
                ResultSet rsPatientLink = s.executeQuery(patientLinkSql);
                String matchingNhsNumber = null;
                if (rsPatientLink.next()) {
                    matchingNhsNumber = rsPatientLink.getString(1);
                }
                rsPatientLink.close();
                s.close();

                //if the person was matched on an NHS number this patient shouldn't have,
                //break the link by assigning a brand-new person ID
                if (!Strings.isNullOrEmpty(matchingNhsNumber)) {
                    String newPersonId = UUID.randomUUID().toString();
                    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    String createdAtStr = sdf.format(new Date());
                    s = patientSearchConnection.createStatement();

                    //new record in patient link history
                    String patientHistorySql = "INSERT INTO patient_link_history VALUES ('" + patientId + "', '" + service.getId() + "', '" + createdAtStr + "', '" + newPersonId + "', '" + personId + "')";
                    s.execute(patientHistorySql);

                    //update patient link
                    String patientLinkUpdateSql = "UPDATE patient_link SET person_id = '" + newPersonId + "' WHERE patient_id = '" + patientId + "'";
                    s.execute(patientLinkUpdateSql);
                    patientSearchConnection.commit();
                    s.close();
                    fixedPersons++;
                }

                //if patient search has an invalid (rather than missing) NHS number,
                //rebuild the search record from the current FHIR patient
                if (!Strings.isNullOrEmpty(nhsNumber)) {
                    ResourceDalI resourceDal = DalProvider.factoryResourceDal();
                    Patient patient = (Patient) resourceDal.getCurrentVersionAsResource(service.getId(), ResourceType.Patient, patientId);
                    if (patient == null) {
                        //guard against deleted patients — previously this would have NPE'd and aborted the whole run
                        LOG.error("Failed to find patient resource for " + patientId);
                    } else {
                        PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
                        patientSearchDal.update(service.getId(), patient);
                        fixedSearches++;
                    }
                }

                checked++;
                if (checked % 50 == 0) {
                    LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches);
                }
            }
            LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches);
            rs.close();
        }
        patientSearchStatement.close();
        entityManager.close();
        LOG.info("Finished fixing persons with no NHS number");
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/*private static void checkDeletedObs(UUID serviceId, UUID systemId) {
LOG.info("Checking Observations for " + serviceId);
try {
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
List<ResourceType> potentialResourceTypes = new ArrayList<>();
potentialResourceTypes.add(ResourceType.Procedure);
potentialResourceTypes.add(ResourceType.AllergyIntolerance);
potentialResourceTypes.add(ResourceType.FamilyMemberHistory);
potentialResourceTypes.add(ResourceType.Immunization);
potentialResourceTypes.add(ResourceType.DiagnosticOrder);
potentialResourceTypes.add(ResourceType.Specimen);
potentialResourceTypes.add(ResourceType.DiagnosticReport);
potentialResourceTypes.add(ResourceType.ReferralRequest);
potentialResourceTypes.add(ResourceType.Condition);
potentialResourceTypes.add(ResourceType.Observation);
List<String> subscriberConfigs = new ArrayList<>();
subscriberConfigs.add("ceg_data_checking");
subscriberConfigs.add("ceg_enterprise");
subscriberConfigs.add("hurley_data_checking");
subscriberConfigs.add("hurley_deidentified");
Set<String> observationsNotDeleted = new HashSet<>();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (Exchange exchange : exchanges) {
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
ExchangePayloadFile firstItem = payload.get(0);
//String version = EmisCsvToFhirTransformer.determineVersion(payload);
//if we've reached the point before we process data for this practice, break out
try {
if (!EmisCsvToFhirTransformer.shouldProcessPatientData(payload)) {
break;
}
} catch (TransformException e) {
LOG.info("Skipping exchange containing " + firstItem.getPath());
continue;
}
String name = FilenameUtils.getBaseName(firstItem.getPath());
String[] toks = name.split("_");
String agreementId = toks[4];
LOG.info("Doing exchange containing " + firstItem.getPath());
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true);
Map<UUID, ExchangeBatch> hmBatchesByPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId());
for (ExchangeBatch batch : batches) {
if (batch.getEdsPatientId() != null) {
hmBatchesByPatient.put(batch.getEdsPatientId(), batch);
}
}
for (ExchangePayloadFile item : payload) {
String type = item.getType();
if (type.equals("CareRecord_Observation")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String deleted = record.get("Deleted");
String observationId = record.get("ObservationGuid");
if (deleted.equalsIgnoreCase("true")) {
//if observation was reinstated at some point, skip it
if (observationsNotDeleted.contains(observationId)) {
continue;
}
String patientId = record.get("PatientGuid");
CsvCell patientCell = CsvCell.factoryDummyWrapper(patientId);
CsvCell observationCell = CsvCell.factoryDummyWrapper(observationId);
Set<ResourceType> resourceTypes = org.endeavourhealth.transform.emis.csv.transforms.careRecord.ObservationTransformer.findOriginalTargetResourceTypes(csvHelper, patientCell, observationCell);
for (ResourceType resourceType: resourceTypes) {
//will already have been done OK
if (resourceType == ResourceType.Observation) {
continue;
}
String sourceId = patientId + ":" + observationId;
UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId);
if (uuid == null) {
throw new Exception("Failed to find UUID for " + resourceType + " " + sourceId);
}
LOG.debug("Fixing " + resourceType + " " + uuid);
//create file of IDs to delete for each subscriber DB
for (String subscriberConfig : subscriberConfigs) {
EnterpriseIdDalI subscriberDal = DalProvider.factoryEnterpriseIdDal(subscriberConfig);
Long enterpriseId = subscriberDal.findEnterpriseId(resourceType.toString(), uuid.toString());
if (enterpriseId == null) {
continue;
}
String sql = null;
if (resourceType == ResourceType.AllergyIntolerance) {
sql = "DELETE FROM allergy_intolerance WHERE id = " + enterpriseId;
} else if (resourceType == ResourceType.ReferralRequest) {
sql = "DELETE FROM referral_request WHERE id = " + enterpriseId;
} else {
sql = "DELETE FROM observation WHERE id = " + enterpriseId;
}
sql += "\n";
File f = new File(subscriberConfig + ".sql");
Files.write(f.toPath(), sql.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE);
}
//delete resource if not already done
ResourceWrapper resourceWrapper = resourceDal.getCurrentVersion(serviceId, resourceType.toString(), uuid);
if (resourceWrapper != null && !resourceWrapper.isDeleted()) {
ExchangeBatch batch = hmBatchesByPatient.get(resourceWrapper.getPatientId());
resourceWrapper.setDeleted(true);
resourceWrapper.setResourceData(null);
resourceWrapper.setResourceMetadata("");
resourceWrapper.setExchangeBatchId(batch.getBatchId());
resourceWrapper.setVersion(UUID.randomUUID());
resourceWrapper.setCreatedAt(new Date());
resourceWrapper.setExchangeId(exchange.getId());
resourceDal.delete(resourceWrapper);
}
}
} else {
observationsNotDeleted.add(observationId);
}
}
parser.close();
}
}
}
LOG.info("Finished Checking Observations for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void testBatchInserts(String url, String user, String pass, String num, String batchSizeStr) {
LOG.info("Testing Batch Inserts");
try {
int inserts = Integer.parseInt(num);
int batchSize = Integer.parseInt(batchSizeStr);
LOG.info("Openning Connection");
Properties props = new Properties();
props.setProperty("user", user);
props.setProperty("password", pass);
Connection conn = DriverManager.getConnection(url, props);
//String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?);";
String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?)";
PreparedStatement ps = conn.prepareStatement(sql);
if (batchSize == 1) {
LOG.info("Testing non-batched inserts");
long start = System.currentTimeMillis();
for (int i = 0; i < inserts; i++) {
int col = 1;
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, randomStr());
ps.execute();
}
long end = System.currentTimeMillis();
LOG.info("Done " + inserts + " in " + (end - start) + " ms");
} else {
LOG.info("Testing batched inserts with batch size " + batchSize);
long start = System.currentTimeMillis();
for (int i = 0; i < inserts; i++) {
int col = 1;
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, randomStr());
ps.addBatch();
if ((i + 1) % batchSize == 0
|| i + 1 >= inserts) {
ps.executeBatch();
}
}
long end = System.currentTimeMillis();
LOG.info("Done " + inserts + " in " + (end - start) + " ms");
}
ps.close();
conn.close();
LOG.info("Finished Testing Batch Inserts");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/**
 * Builds a pseudo-random string of at least 1100 characters by concatenating
 * random long values (used to generate filler data for the insert tests).
 *
 * @return a random string of 1100-1119 characters
 */
private static String randomStr() {
    //StringBuilder rather than StringBuffer - no synchronisation is needed for a local
    StringBuilder sb = new StringBuilder(1120);
    //ThreadLocalRandom avoids re-seeding with the clock on every call; the previous
    //new Random(System.currentTimeMillis()) returned identical "random" strings when
    //called twice within the same millisecond
    java.util.concurrent.ThreadLocalRandom r = java.util.concurrent.ThreadLocalRandom.current();
    while (sb.length() < 1100) {
        sb.append(r.nextLong());
    }
    return sb.toString();
}
/*private static void fixEmisProblems(UUID serviceId, UUID systemId) {
LOG.info("Fixing Emis Problems for " + serviceId);
try {
Map<String, List<String>> hmReferences = new HashMap<>();
Set<String> patientIds = new HashSet<>();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null);
LOG.info("Caching problem links");
//Go through all files to work out problem children for every problem
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
//String version = EmisCsvToFhirTransformer.determineVersion(payload);
ExchangePayloadFile firstItem = payload.get(0);
String name = FilenameUtils.getBaseName(firstItem.getPath());
String[] toks = name.split("_");
String agreementId = toks[4];
LOG.info("Doing exchange containing " + firstItem.getPath());
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true);
for (ExchangePayloadFile item: payload) {
String type = item.getType();
if (type.equals("CareRecord_Observation")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("ObservationGuid");
String localId = patientId + ":" + observationId;
ResourceType resourceType = ObservationTransformer.findOriginalTargetResourceType(filer, CsvCell.factoryDummyWrapper(patientId), CsvCell.factoryDummyWrapper(observationId));
Reference localReference = ReferenceHelper.createReference(resourceType, localId);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else if (type.equals("Prescribing_DrugRecord")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemObservationGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("DrugRecordGuid");
String localId = patientId + ":" + observationId;
Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, localId);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else if (type.equals("Prescribing_IssueRecord")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemObservationGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("IssueRecordGuid");
String localId = patientId + ":" + observationId;
Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, localId);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else {
//no problem link
}
}
}
LOG.info("Finished caching problem links, finding " + patientIds.size() + " patients");
int done = 0;
int fixed = 0;
for (String localPatientId: patientIds) {
Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId);
Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer);
String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference);
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), ResourceType.Condition.toString());
for (ResourceWrapper wrapper: wrappers) {
if (wrapper.isDeleted()) {
continue;
}
String originalJson = wrapper.getResourceData();
Condition condition = (Condition)FhirSerializationHelper.deserializeResource(originalJson);
ConditionBuilder conditionBuilder = new ConditionBuilder(condition);
//sort out the nested extension references
Extension outerExtension = ExtensionConverter.findExtension(condition, FhirExtensionUri.PROBLEM_LAST_REVIEWED);
if (outerExtension != null) {
Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_LAST_REVIEWED__PERFORMER);
if (innerExtension != null) {
Reference performerReference = (Reference)innerExtension.getValue();
String value = performerReference.getReference();
if (value.endsWith("}")) {
Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer);
innerExtension.setValue(globalPerformerReference);
}
}
}
//sort out the contained list of children
ContainedListBuilder listBuilder = new ContainedListBuilder(conditionBuilder);
//remove any existing children
listBuilder.removeContainedList();
//add all the new ones we've found
List<String> localChildReferences = hmReferences.get(wrapper.getResourceId().toString());
if (localChildReferences != null) {
for (String localChildReference: localChildReferences) {
Reference reference = ReferenceHelper.createReference(localChildReference);
listBuilder.addContainedListItem(reference);
}
}
//save the updated condition
String newJson = FhirSerializationHelper.serializeResource(condition);
if (!newJson.equals(originalJson)) {
wrapper.setResourceData(newJson);
saveResourceWrapper(serviceId, wrapper);
fixed ++;
}
}
done ++;
if (done % 1000 == 0) {
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
}
}
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
LOG.info("Finished Emis Problems for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/**
 * Runs the fixEmisProblems3(..) routine against every service belonging to the given
 * publisher config, skipping the hard-coded list of service IDs that have already
 * been processed. Any failure is logged and swallowed.
 */
private static void fixEmisProblems3ForPublisher(String publisher, UUID systemId) {
    try {
        LOG.info("Doing fix for " + publisher);

        //service IDs that have already been fixed on a previous run - compared
        //case-insensitively against each service's UUID
        String[] alreadyFixedIds = new String[]{
                "01fcfe94-5dfd-4951-b74d-129f874209b0",
                "07a267d3-189b-4968-b9b0-547de28edef5",
                "0b9601d1-f7ab-4f5d-9f77-1841050f75ab",
                "0fd2ff5d-2c25-4707-afe8-707e81a250b8",
                "14276da8-c344-4841-a36d-aa38940e78e7",
                "158251ca-0e1d-4471-8fae-250b875911e1",
                "160131e2-a5ff-49c8-b62e-ae499a096193",
                "16490f2b-62ce-44c6-9816-528146272340",
                "18fa1bed-b9a0-4d55-a0cc-dfc31831259a",
                "19cba169-d41e-424a-812f-575625c72305",
                "19ff6a03-25df-4e61-9ab1-4573cfd24729",
                "1b3d1627-f49e-4103-92d6-af6016476da3",
                "1e198fbb-c9cd-429a-9b50-0f124d0d825c",
                "20444fbe-0802-46fc-8203-339a36f52215",
                "21e27bf3-8071-48dd-924f-1d8d21f9216f",
                "23203e72-a3b0-4577-9942-30f7cdff358e",
                "23be1f4a-68ec-4a49-b2ec-aa9109c99dcd",
                "2b56033f-a9b4-4bab-bb53-c619bdb38895",
                "2ba26f2d-8068-4b77-8e62-431edfc2c2e2",
                "2ed89931-0ce7-49ea-88ac-7266b6c03be0",
                "3abf8ded-f1b1-495b-9a2d-5d0223e33fa7",
                "3b0f6720-2ffd-4f8a-afcd-7e3bb311212d",
                "415b509a-cf39-45bc-9acf-7f982a00e159",
                "4221276f-a3b0-4992-b426-ec2d8c7347f2",
                "49868211-d868-4b55-a201-5acac0be0cc0",
                "55fdcbd0-9b2d-493a-b874-865ccc93a156",
                "56124545-d266-4da9-ba1f-b3a16edc7f31",
                "6c11453b-dbf8-4749-a0ec-ab705920e316"
        };

        ServiceDalI serviceDal = DalProvider.factoryServiceDal();
        for (Service service : serviceDal.getAll()) {

            //only touch services for the requested publisher config
            String publisherConfig = service.getPublisherConfigName();
            if (publisherConfig == null
                    || !publisherConfig.equals(publisher)) {
                continue;
            }

            //skip anything we've already processed
            String serviceIdStr = service.getId().toString();
            boolean skip = false;
            for (String fixedId : alreadyFixedIds) {
                if (serviceIdStr.equalsIgnoreCase(fixedId)) {
                    skip = true;
                    break;
                }
            }
            if (!skip) {
                fixEmisProblems3(service.getId(), systemId);
            }
        }
        LOG.info("Done fix for " + publisher);

    } catch (Throwable t) {
        LOG.error("", t);
    }
}
private static void fixEmisProblems3(UUID serviceId, UUID systemId) {
LOG.info("Fixing Emis Problems 3 for " + serviceId);
try {
Set<String> patientIds = new HashSet<>();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null);
LOG.info("Finding patients");
//Go through all files to work out problem children for every problem
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (int i=exchanges.size()-1; i>=0; i
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
for (ExchangePayloadFile item: payload) {
String type = item.getType();
if (type.equals("Admin_Patient")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
}
parser.close();
}
}
}
LOG.info("Finished checking files, finding " + patientIds.size() + " patients");
int done = 0;
int fixed = 0;
for (String localPatientId: patientIds) {
Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId);
Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer);
String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference);
List<ResourceType> potentialResourceTypes = new ArrayList<>();
potentialResourceTypes.add(ResourceType.Procedure);
potentialResourceTypes.add(ResourceType.AllergyIntolerance);
potentialResourceTypes.add(ResourceType.FamilyMemberHistory);
potentialResourceTypes.add(ResourceType.Immunization);
potentialResourceTypes.add(ResourceType.DiagnosticOrder);
potentialResourceTypes.add(ResourceType.Specimen);
potentialResourceTypes.add(ResourceType.DiagnosticReport);
potentialResourceTypes.add(ResourceType.ReferralRequest);
potentialResourceTypes.add(ResourceType.Condition);
potentialResourceTypes.add(ResourceType.Observation);
for (ResourceType resourceType: potentialResourceTypes) {
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), resourceType.toString());
for (ResourceWrapper wrapper : wrappers) {
if (wrapper.isDeleted()) {
continue;
}
String originalJson = wrapper.getResourceData();
DomainResource resource = (DomainResource)FhirSerializationHelper.deserializeResource(originalJson);
//Also go through all observation records and any that have parent observations - these need fixing too???
Extension extension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PARENT_RESOURCE);
if (extension != null) {
Reference reference = (Reference)extension.getValue();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
if (resource instanceof Observation) {
Observation obs = (Observation)resource;
if (obs.hasRelated()) {
for (Observation.ObservationRelatedComponent related: obs.getRelated()) {
if (related.hasTarget()) {
Reference reference = related.getTarget();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
}
if (resource instanceof DiagnosticReport) {
DiagnosticReport diag = (DiagnosticReport)resource;
if (diag.hasResult()) {
for (Reference reference: diag.getResult()) {
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
//Go through all patients, go through all problems, for any child that's Observation, find the true resource type then update and save
if (resource instanceof Condition) {
if (resource.hasContained()) {
for (Resource contained: resource.getContained()) {
if (contained.getId().equals("Items")) {
List_ containedList = (List_)contained;
if (containedList.hasEntry()) {
for (List_.ListEntryComponent entry: containedList.getEntry()) {
Reference reference = entry.getItem();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
}
}
//sort out the nested extension references
Extension outerExtension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PROBLEM_RELATED);
if (outerExtension != null) {
Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_RELATED__TARGET);
if (innerExtension != null) {
Reference performerReference = (Reference)innerExtension.getValue();
String value = performerReference.getReference();
if (value.endsWith("}")) {
Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer);
innerExtension.setValue(globalPerformerReference);
}
}
}
}
//save the updated condition
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!newJson.equals(originalJson)) {
wrapper.setResourceData(newJson);
saveResourceWrapper(serviceId, wrapper);
fixed++;
}
}
}
done ++;
if (done % 1000 == 0) {
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
}
}
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
LOG.info("Finished Emis Problems 3 for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}
/**
 * If the given reference points at an Observation, works out the resource type the
 * target was actually saved as and rewrites the reference in place to point at it.
 *
 * @return true if the reference was changed, false if it was left untouched
 */
private static boolean fixReference(UUID serviceId, HasServiceSystemAndExchangeIdI csvHelper, Reference reference, List<ResourceType> potentialResourceTypes) throws Exception {

    //only references that currently point at an Observation need checking
    ReferenceComponents components = ReferenceHelper.getReferenceComponents(reference);
    if (components.getResourceType() == ResourceType.Observation) {

        //map the EDS reference back to its original source ID so we can look up
        //what the resource was really saved as
        Reference localReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, reference);
        String localId = ReferenceHelper.getReferenceId(localReference);

        String replacementValue = findTrueResourceType(serviceId, potentialResourceTypes, localId);
        if (replacementValue != null) {
            reference.setReference(replacementValue);
            return true;
        }
    }
    return false;
}
/**
 * Tries each candidate resource type in turn, returning a reference string for the
 * first type that both has an EDS UUID mapped for the source ID and has a current
 * version stored in the resource repository.
 *
 * @return the reference string for the matching type, or null if none matched
 */
private static String findTrueResourceType(UUID serviceId, List<ResourceType> potentials, String sourceId) throws Exception {
    ResourceDalI resourceRepository = DalProvider.factoryResourceDal();
    for (ResourceType candidate : potentials) {
        UUID mappedUuid = IdHelper.getEdsResourceId(serviceId, candidate, sourceId);
        if (mappedUuid != null) {
            //an ID mapping alone isn't enough - the resource must actually exist
            ResourceWrapper current = resourceRepository.getCurrentVersion(serviceId, candidate.toString(), mappedUuid);
            if (current != null) {
                return ReferenceHelper.createResourceReference(candidate, mappedUuid.toString());
            }
        }
    }
    return null;
}
// } else if (systemUuid.toString().equalsIgnoreCase("e517fa69-348a-45e9-a113-d9b59ad13095")
// }
/*private static void convertExchangeBody(UUID systemUuid) {
try {
LOG.info("Converting exchange bodies for system " + systemUuid);
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemUuid, Integer.MAX_VALUE);
if (exchanges.isEmpty()) {
continue;
}
LOG.debug("doing " + service.getName() + " with " + exchanges.size() + " exchanges");
for (Exchange exchange: exchanges) {
String exchangeBody = exchange.getBody();
try {
//already done
ExchangePayloadFile[] files = JsonSerializer.deserialize(exchangeBody, ExchangePayloadFile[].class);
continue;
} catch (JsonSyntaxException ex) {
//if the JSON can't be parsed, then it'll be the old format of body that isn't JSON
}
List<ExchangePayloadFile> newFiles = new ArrayList<>();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
for (String file: files) {
ExchangePayloadFile fileObj = new ExchangePayloadFile();
String fileWithoutSharedStorage = file.substring(TransformConfig.instance().getSharedStoragePath().length()+1);
fileObj.setPath(fileWithoutSharedStorage);
//size
List<FileInfo> fileInfos = FileHelper.listFilesInSharedStorageWithInfo(file);
for (FileInfo info: fileInfos) {
if (info.getFilePath().equals(file)) {
long size = info.getSize();
fileObj.setSize(new Long(size));
}
}
//type
if (systemUuid.toString().equalsIgnoreCase("991a9068-01d3-4ff2-86ed-249bd0541fb3") //live
|| systemUuid.toString().equalsIgnoreCase("55c08fa5-ef1e-4e94-aadc-e3d6adc80774")) { //dev
//emis
String name = FilenameUtils.getName(file);
String[] toks = name.split("_");
String first = toks[1];
String second = toks[2];
fileObj.setType(first + "_" + second);
|| systemUuid.toString().equalsIgnoreCase("b0277098-0b6c-4d9d-86ef-5f399fb25f34")) { //dev
//cerner
String name = FilenameUtils.getName(file);
if (Strings.isNullOrEmpty(name)) {
continue;
}
try {
String type = BartsCsvToFhirTransformer.identifyFileType(name);
fileObj.setType(type);
} catch (Exception ex2) {
throw new Exception("Failed to parse file name " + name + " on exchange " + exchange.getId());
} else {
throw new Exception("Unknown system ID " + systemUuid);
}
newFiles.add(fileObj);
}
String json = JsonSerializer.serialize(newFiles);
exchange.setBody(json);
exchangeDal.save(exchange);
}
}
LOG.info("Finished Converting exchange bodies for system " + systemUuid);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
// LOG.debug(json);
// LOG.debug(newJson);
/*private static void fixBartsOrgs(String serviceId) {
try {
LOG.info("Fixing Barts orgs");
ResourceDalI dal = DalProvider.factoryResourceDal();
List<ResourceWrapper> wrappers = dal.getResourcesByService(UUID.fromString(serviceId), ResourceType.Organization.toString());
LOG.debug("Found " + wrappers.size() + " resources");
int done = 0;
int fixed = 0;
for (ResourceWrapper wrapper: wrappers) {
if (!wrapper.isDeleted()) {
List<ResourceWrapper> history = dal.getResourceHistory(UUID.fromString(serviceId), wrapper.getResourceType(), wrapper.getResourceId());
ResourceWrapper mostRecent = history.get(0);
String json = mostRecent.getResourceData();
Organization org = (Organization)FhirSerializationHelper.deserializeResource(json);
String odsCode = IdentifierHelper.findOdsCode(org);
if (Strings.isNullOrEmpty(odsCode)
&& org.hasIdentifier()) {
boolean hasBeenFixed = false;
for (Identifier identifier: org.getIdentifier()) {
if (identifier.getSystem().equals(FhirIdentifierUri.IDENTIFIER_SYSTEM_ODS_CODE)
&& identifier.hasId()) {
odsCode = identifier.getId();
identifier.setValue(odsCode);
identifier.setId(null);
hasBeenFixed = true;
}
}
if (hasBeenFixed) {
String newJson = FhirSerializationHelper.serializeResource(org);
mostRecent.setResourceData(newJson);
LOG.debug("Fixed Organization " + org.getId());
saveResourceWrapper(UUID.fromString(serviceId), mostRecent);
fixed ++;
}
}
}
done ++;
if (done % 100 == 0) {
LOG.debug("Done " + done + ", Fixed " + fixed);
}
}
LOG.debug("Done " + done + ", Fixed " + fixed);
LOG.info("Finished Barts orgs");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void testPreparedStatements(String url, String user, String pass, String serviceId) {
try {
LOG.info("Testing Prepared Statements");
LOG.info("Url: " + url);
LOG.info("user: " + user);
LOG.info("pass: " + pass);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
//create connection
Properties props = new Properties();
props.setProperty("user", user);
props.setProperty("password", pass);
Connection conn = DriverManager.getConnection(url, props);
String sql = "SELECT * FROM internal_id_map WHERE service_id = ? AND id_type = ? AND source_id = ?";
long start = System.currentTimeMillis();
for (int i=0; i<10000; i++) {
PreparedStatement ps = null;
try {
ps = conn.prepareStatement(sql);
ps.setString(1, serviceId);
ps.setString(2, "MILLPERSIDtoMRN");
ps.setString(3, UUID.randomUUID().toString());
ResultSet rs = ps.executeQuery();
while (rs.next()) {
//do nothing
}
} finally {
if (ps != null) {
ps.close();
}
}
}
long end = System.currentTimeMillis();
LOG.info("Took " + (end-start) + " ms");
//close connection
conn.close();
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
// Encounter encounter = (Encounter)FhirSerializationHelper.deserializeResource(currentState.getResourceData());
// saveResourceWrapper(serviceId, currentState);
// Resource resource = FhirSerializationHelper.deserializeResource(currentState.getResourceData());
// }
// Condition condition = (Condition)FhirSerializationHelper.deserializeResource(currentState.getResourceData());
// saveResourceWrapper(serviceId, currentState);
/*private static void fixEncounters(String table) {
LOG.info("Fixing encounters from " + table);
try {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
Date cutoff = sdf.parse("2018-03-14 11:42");
EntityManager entityManager = ConnectionManager.getAdminEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
List<UUID> serviceIds = new ArrayList<>();
Map<UUID, UUID> hmSystems = new HashMap<>();
String sql = "SELECT service_id, system_id FROM " + table + " WHERE done = 0";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
UUID serviceId = UUID.fromString(rs.getString(1));
UUID systemId = UUID.fromString(rs.getString(2));
serviceIds.add(serviceId);
hmSystems.put(serviceId, systemId);
}
rs.close();
statement.close();
entityManager.close();
for (UUID serviceId: serviceIds) {
UUID systemId = hmSystems.get(serviceId);
LOG.info("Doing service " + serviceId + " and system " + systemId);
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, systemId);
List<UUID> exchangeIdsToProcess = new ArrayList<>();
for (UUID exchangeId: exchangeIds) {
List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId);
for (ExchangeTransformAudit audit: audits) {
Date d = audit.getStarted();
if (d.after(cutoff)) {
exchangeIdsToProcess.add(exchangeId);
break;
}
}
}
Map<String, ReferenceList> consultationNewChildMap = new HashMap<>();
Map<String, ReferenceList> observationChildMap = new HashMap<>();
Map<String, ReferenceList> newProblemChildren = new HashMap<>();
for (UUID exchangeId: exchangeIdsToProcess) {
Exchange exchange = exchangeDal.getExchange(exchangeId);
String[] files = ExchangeHelper.parseExchangeBodyIntoFileList(exchange.getBody());
String version = EmisCsvToFhirTransformer.determineVersion(files);
List<String> interestingFiles = new ArrayList<>();
for (String file: files) {
if (file.indexOf("CareRecord_Consultation") > -1
|| file.indexOf("CareRecord_Observation") > -1
|| file.indexOf("CareRecord_Diary") > -1
|| file.indexOf("Prescribing_DrugRecord") > -1
|| file.indexOf("Prescribing_IssueRecord") > -1
|| file.indexOf("CareRecord_Problem") > -1) {
interestingFiles.add(file);
}
}
files = interestingFiles.toArray(new String[0]);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers);
String dataSharingAgreementGuid = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(parsers);
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchangeId, dataSharingAgreementGuid, true);
Consultation consultationParser = (Consultation)parsers.get(Consultation.class);
while (consultationParser.nextRecord()) {
CsvCell consultationGuid = consultationParser.getConsultationGuid();
CsvCell patientGuid = consultationParser.getPatientGuid();
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
consultationNewChildMap.put(sourceId, new ReferenceList());
}
Problem problemParser = (Problem)parsers.get(Problem.class);
while (problemParser.nextRecord()) {
CsvCell problemGuid = problemParser.getObservationGuid();
CsvCell patientGuid = problemParser.getPatientGuid();
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
newProblemChildren.put(sourceId, new ReferenceList());
}
//run this pre-transformer to pre-cache some stuff in the csv helper, which
//is needed when working out the resource type that each observation would be saved as
ObservationPreTransformer.transform(version, parsers, null, csvHelper);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
CsvCell observationGuid = observationParser.getObservationGuid();
CsvCell patientGuid = observationParser.getPatientGuid();
String obSourceId = EmisCsvHelper.createUniqueId(patientGuid, observationGuid);
CsvCell codeId = observationParser.getCodeId();
if (codeId.isEmpty()) {
continue;
}
ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper);
UUID obUuid = IdHelper.getEdsResourceId(serviceId, resourceType, obSourceId);
if (obUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + resourceType + " and source ID " + obSourceId);
//resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper);
}
Reference obReference = ReferenceHelper.createReference(resourceType, obUuid.toString());
CsvCell consultationGuid = observationParser.getConsultationGuid();
if (!consultationGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
ReferenceList referenceList = consultationNewChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
consultationNewChildMap.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
CsvCell problemGuid = observationParser.getProblemGuid();
if (!problemGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
CsvCell parentObGuid = observationParser.getParentObservationGuid();
if (!parentObGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, parentObGuid);
ReferenceList referenceList = observationChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
observationChildMap.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
}
Diary diaryParser = (Diary)parsers.get(Diary.class);
while (diaryParser.nextRecord()) {
CsvCell consultationGuid = diaryParser.getConsultationGuid();
if (!consultationGuid.isEmpty()) {
CsvCell diaryGuid = diaryParser.getDiaryGuid();
CsvCell patientGuid = diaryParser.getPatientGuid();
String diarySourceId = EmisCsvHelper.createUniqueId(patientGuid, diaryGuid);
UUID diaryUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.ProcedureRequest, diarySourceId);
if (diaryUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.ProcedureRequest + " and source ID " + diarySourceId);
}
Reference diaryReference = ReferenceHelper.createReference(ResourceType.ProcedureRequest, diaryUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
ReferenceList referenceList = consultationNewChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
consultationNewChildMap.put(sourceId, referenceList);
}
referenceList.add(diaryReference);
}
}
IssueRecord issueRecordParser = (IssueRecord)parsers.get(IssueRecord.class);
while (issueRecordParser.nextRecord()) {
CsvCell problemGuid = issueRecordParser.getProblemObservationGuid();
if (!problemGuid.isEmpty()) {
CsvCell issueRecordGuid = issueRecordParser.getIssueRecordGuid();
CsvCell patientGuid = issueRecordParser.getPatientGuid();
String issueRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, issueRecordGuid);
UUID issueRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationOrder, issueRecordSourceId);
if (issueRecordUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.MedicationOrder + " and source ID " + issueRecordSourceId);
}
Reference issueRecordReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, issueRecordUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(issueRecordReference);
}
}
DrugRecord drugRecordParser = (DrugRecord)parsers.get(DrugRecord.class);
while (drugRecordParser.nextRecord()) {
CsvCell problemGuid = drugRecordParser.getProblemObservationGuid();
if (!problemGuid.isEmpty()) {
CsvCell drugRecordGuid = drugRecordParser.getDrugRecordGuid();
CsvCell patientGuid = drugRecordParser.getPatientGuid();
String drugRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, drugRecordGuid);
UUID drugRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationStatement, drugRecordSourceId);
if (drugRecordUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.MedicationStatement + " and source ID " + drugRecordSourceId);
}
Reference drugRecordReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, drugRecordUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(drugRecordReference);
}
}
for (AbstractCsvParser parser : parsers.values()) {
try {
parser.close();
} catch (IOException ex) {
//don't worry if this fails, as we're done anyway
}
}
}
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
LOG.info("Found " + consultationNewChildMap.size() + " Encounters to fix");
for (String encounterSourceId: consultationNewChildMap.keySet()) {
ReferenceList childReferences = consultationNewChildMap.get(encounterSourceId);
//map to UUID
UUID encounterId = IdHelper.getEdsResourceId(serviceId, ResourceType.Encounter, encounterSourceId);
if (encounterId == null) {
continue;
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Encounter.toString(), encounterId);
if (history.isEmpty()) {
continue;
//throw new Exception("Empty history for Encounter " + encounterId);
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (wrapper.getResourceData() != null) {
Encounter encounter = (Encounter) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
EncounterBuilder encounterBuilder = new EncounterBuilder(encounter);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder);
List<Reference> previousChildren = containedListBuilder.getContainedListItems();
childReferences.add(previousChildren);
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
EncounterBuilder encounterBuilder = new EncounterBuilder(encounter);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder);
containedListBuilder.addReferences(childReferences);
String newJson = FhirSerializationHelper.serializeResource(encounter);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
}
LOG.info("Found " + observationChildMap.size() + " Parent Observations to fix");
for (String sourceId: observationChildMap.keySet()) {
ReferenceList childReferences = observationChildMap.get(sourceId);
//map to UUID
ResourceType resourceType = null;
UUID resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.Observation, sourceId);
if (resourceId != null) {
resourceType = ResourceType.Observation;
} else {
resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.DiagnosticReport, sourceId);
if (resourceId != null) {
resourceType = ResourceType.DiagnosticReport;
} else {
continue;
}
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceType.toString(), resourceId);
if (history.isEmpty()) {
//throw new Exception("Empty history for " + resourceType + " " + resourceId);
continue;
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (resourceType == ResourceType.Observation) {
if (wrapper.getResourceData() != null) {
Observation observation = (Observation) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
if (observation.hasRelated()) {
for (Observation.ObservationRelatedComponent related : observation.getRelated()) {
Reference reference = related.getTarget();
childReferences.add(reference);
}
}
}
} else {
if (wrapper.getResourceData() != null) {
DiagnosticReport report = (DiagnosticReport) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
if (report.hasResult()) {
for (Reference reference : report.getResult()) {
childReferences.add(reference);
}
}
}
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
boolean changed = false;
if (resourceType == ResourceType.Observation) {
ObservationBuilder resourceBuilder = new ObservationBuilder((Observation)resource);
for (int i=0; i<childReferences.size(); i++) {
Reference reference = childReferences.getReference(i);
if (resourceBuilder.addChildObservation(reference)) {
changed = true;
}
}
} else {
DiagnosticReportBuilder resourceBuilder = new DiagnosticReportBuilder((DiagnosticReport)resource);
for (int i=0; i<childReferences.size(); i++) {
Reference reference = childReferences.getReference(i);
if (resourceBuilder.addResult(reference)) {
changed = true;
}
}
}
if (changed) {
String newJson = FhirSerializationHelper.serializeResource(resource);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
LOG.info("Found " + newProblemChildren.size() + " Problems to fix");
for (String sourceId: newProblemChildren.keySet()) {
ReferenceList childReferences = newProblemChildren.get(sourceId);
//map to UUID
UUID conditionId = IdHelper.getEdsResourceId(serviceId, ResourceType.Condition, sourceId);
if (conditionId == null) {
continue;
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Condition.toString(), conditionId);
if (history.isEmpty()) {
continue;
//throw new Exception("Empty history for Condition " + conditionId);
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (wrapper.getResourceData() != null) {
Condition previousVersion = (Condition) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
ConditionBuilder conditionBuilder = new ConditionBuilder(previousVersion);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder);
List<Reference> previousChildren = containedListBuilder.getContainedListItems();
childReferences.add(previousChildren);
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
ConditionBuilder conditionBuilder = new ConditionBuilder(condition);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder);
containedListBuilder.addReferences(childReferences);
String newJson = FhirSerializationHelper.serializeResource(condition);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
}
//mark as done
String updateSql = "UPDATE " + table + " SET done = 1 WHERE service_id = '" + serviceId + "';";
entityManager = ConnectionManager.getAdminEntityManager();
session = (SessionImpl)entityManager.getDelegate();
connection = session.connection();
statement = connection.createStatement();
entityManager.getTransaction().begin();
statement.executeUpdate(updateSql);
entityManager.getTransaction().commit();
}
* For each practice:
Go through all files processed since 14 March
Cache all links as above
Cache all Encounters saved too
For each Encounter referenced at all:
Retrieve latest version from resource current
Retrieve version prior to 14 March
Update current version with old references plus new ones
For each parent observation:
Retrieve latest version (could be observation or diagnostic report)
For each problem:
Retrieve latest version from resource current
Check if still a problem:
Retrieve version prior to 14 March
Update current version with old references plus new ones
LOG.info("Finished Fixing encounters from " + table);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * Writes the (already amended) resource JSON held in the wrapper back over the existing
 * rows in both resource_current and resource_history, recalculating the checksum first.
 * The wrapper must carry a version UUID so the correct history row can be targeted.
 *
 * @param serviceId service whose EHR database should be updated
 * @param wrapper   resource wrapper carrying the new JSON plus the identifying IDs
 * @throws Exception if the wrapper has no version UUID, or on any DB failure
 */
private static void saveResourceWrapper(UUID serviceId, ResourceWrapper wrapper) throws Exception {

    //we can only update resource_history if we know which version row to hit
    if (wrapper.getVersion() == null) {
        throw new Exception("Can't update resource history without version UUID");
    }

    //keep the stored checksum consistent with the new resource JSON
    if (wrapper.getResourceData() != null) {
        long checksum = FhirStorageService.generateChecksum(wrapper.getResourceData());
        wrapper.setResourceChecksum(Long.valueOf(checksum));
    }

    EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId);
    try {
        SessionImpl session = (SessionImpl)entityManager.getDelegate();
        Connection connection = session.connection();

        entityManager.getTransaction().begin();

        String json = wrapper.getResourceData();

        String patientId = "";
        if (wrapper.getPatientId() != null) {
            patientId = wrapper.getPatientId().toString();
        }

        //parameterised statements avoid SQL injection and the fragile manual
        //quote/backslash escaping that concatenating the JSON into the SQL required
        String updateCurrentSql = "UPDATE resource_current"
                + " SET resource_data = ?,"
                + " resource_checksum = ?"
                + " WHERE service_id = ?"
                + " AND patient_id = ?"
                + " AND resource_type = ?"
                + " AND resource_id = ?";

        try (java.sql.PreparedStatement ps = connection.prepareStatement(updateCurrentSql)) {
            ps.setString(1, json);
            ps.setObject(2, wrapper.getResourceChecksum()); //setObject so a null checksum still works
            ps.setString(3, wrapper.getServiceId().toString());
            ps.setString(4, patientId);
            ps.setString(5, wrapper.getResourceType());
            ps.setString(6, wrapper.getResourceId().toString());
            ps.executeUpdate();
        }

        String updateHistorySql = "UPDATE resource_history"
                + " SET resource_data = ?,"
                + " resource_checksum = ?"
                + " WHERE resource_id = ?"
                + " AND resource_type = ?"
                + " AND version = ?";

        try (java.sql.PreparedStatement ps = connection.prepareStatement(updateHistorySql)) {
            ps.setString(1, json);
            ps.setObject(2, wrapper.getResourceChecksum());
            ps.setString(3, wrapper.getResourceId().toString());
            ps.setString(4, wrapper.getResourceType());
            ps.setString(5, wrapper.getVersion().toString());
            ps.executeUpdate();
        }

        entityManager.getTransaction().commit();

    } finally {
        //previously the EntityManager was never closed, leaking a connection per call
        entityManager.close();
    }
}
/*private static void populateNewSearchTable(String table) {
LOG.info("Populating New Search Table");
try {
EntityManager entityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
List<String> patientIds = new ArrayList<>();
Map<String, String> serviceIds = new HashMap<>();
String sql = "SELECT patient_id, service_id FROM " + table + " WHERE done = 0";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
String patientId = rs.getString(1);
String serviceId = rs.getString(2);
patientIds.add(patientId);
serviceIds.put(patientId, serviceId);
}
rs.close();
statement.close();
entityManager.close();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearch2Dal();
LOG.info("Found " + patientIds.size() + " to do");
for (int i=0; i<patientIds.size(); i++) {
String patientIdStr = patientIds.get(i);
UUID patientId = UUID.fromString(patientIdStr);
String serviceIdStr = serviceIds.get(patientIdStr);
UUID serviceId = UUID.fromString(serviceIdStr);
Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientIdStr);
if (patient != null) {
patientSearchDal.update(serviceId, patient);
//find episode of care
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, null, patientId, ResourceType.EpisodeOfCare.toString());
for (ResourceWrapper wrapper: wrappers) {
if (!wrapper.isDeleted()) {
EpisodeOfCare episodeOfCare = (EpisodeOfCare)FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
patientSearchDal.update(serviceId, episodeOfCare);
}
}
}
String updateSql = "UPDATE " + table + " SET done = 1 WHERE patient_id = '" + patientIdStr + "' AND service_id = '" + serviceIdStr + "';";
entityManager = ConnectionManager.getEdsEntityManager();
session = (SessionImpl)entityManager.getDelegate();
connection = session.connection();
statement = connection.createStatement();
entityManager.getTransaction().begin();
statement.executeUpdate(updateSql);
entityManager.getTransaction().commit();
if (i % 5000 == 0) {
LOG.info("Done " + (i+1) + " of " + patientIds.size());
}
}
entityManager.close();
LOG.info("Finished Populating New Search Table");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
private static void createBartsSubset(String sourceDir, UUID serviceUuid, UUID systemUuid, String samplePatientsFile) {
LOG.info("Creating Barts Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
createBartsSubsetForFile(sourceDir, serviceUuid, systemUuid, personIds);
LOG.info("Finished Creating Barts Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void createBartsSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
for (File sourceFile: sourceDir.listFiles()) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
LOG.info("Doing dir " + sourceFile);
createBartsSubsetForFile(sourceFile, destFile, personIds);
} else {
//we have some bad partial files in, so ignore them
String ext = FilenameUtils.getExtension(name);
if (ext.equalsIgnoreCase("filepart")) {
continue;
}
//if the file is empty, we still need the empty file in the filtered directory, so just copy it
if (sourceFile.length() == 0) {
LOG.info("Copying empty file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
continue;
}
String baseName = FilenameUtils.getBaseName(name);
String fileType = BartsCsvToFhirTransformer.identifyFileType(baseName);
if (isCerner22File(fileType)) {
LOG.info("Checking 2.2 file " + sourceFile);
if (destFile.exists()) {
destFile.delete();
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
int lineIndex = -1;
PrintWriter pw = null;
int personIdColIndex = -1;
int expectedCols = -1;
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
lineIndex ++;
if (lineIndex == 0) {
if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
//this file has no headers, so needs hard-coding
personIdColIndex = 5;
} else {
//check headings for PersonID col
String[] toks = line.split("\\|", -1);
expectedCols = toks.length;
for (int i=0; i<expectedCols; i++) {
String col = toks[i];
if (col.equalsIgnoreCase("PERSON_ID")
|| col.equalsIgnoreCase("#PERSON_ID")) {
personIdColIndex = i;
break;
}
}
//if no person ID, then just copy the entire file
if (personIdColIndex == -1) {
br.close();
br = null;
LOG.info(" Copying 2.2 file to " + destFile);
copyFile(sourceFile, destFile);
break;
} else {
LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
}
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
pw = new PrintWriter(bw);
} else {
//filter on personID
String[] toks = line.split("\\|", -1);
if (expectedCols != -1
&& toks.length != expectedCols) {
throw new Exception("Line " + (lineIndex+1) + " has " + toks.length + " cols but expecting " + expectedCols);
} else {
String personId = toks[personIdColIndex];
if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
&& !personIds.contains(personId)) {
continue;
}
}
}
pw.println(line);
}
if (br != null) {
br.close();
}
if (pw != null) {
pw.flush();
pw.close();
}
} else {
//the 2.1 files are going to be a pain to split by patient, so just copy them over
LOG.info("Copying 2.1 file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
}
}
}
}*/
/**
 * Walks every exchange for the service/system and produces a filtered copy of each
 * payload file containing only rows for the given person IDs. Empty files and 2.1-era
 * files are copied verbatim; 2.2 files are filtered on their person ID column.
 *
 * @param sourceDir  root directory holding the full (unfiltered) source files
 * @param serviceUuid service whose exchanges should be processed
 * @param systemUuid  system of those exchanges
 * @param personIds   person IDs whose rows should be kept
 * @throws Exception on any I/O or parsing failure
 */
private static void createBartsSubsetForFile(String sourceDir, UUID serviceUuid, UUID systemUuid, Set<String> personIds) throws Exception {

    ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
    List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);

    for (Exchange exchange: exchanges) {

        List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());

        for (ExchangePayloadFile fileObj : files) {

            //the exchange body holds paths rooted on shared storage; re-root them on sourceDir
            String filePathWithoutSharedStorage = fileObj.getPath().substring(TransformConfig.instance().getSharedStoragePath().length()+1);
            String sourceFilePath = FilenameUtils.concat(sourceDir, filePathWithoutSharedStorage);
            File sourceFile = new File(sourceFilePath);

            String destFilePath = fileObj.getPath();
            File destFile = new File(destFilePath);

            File destDir = destFile.getParentFile();
            if (!destDir.exists()) {
                destDir.mkdirs();
            }

            //if the file is empty, we still need the empty file in the filtered directory, so just copy it
            if (sourceFile.length() == 0) {
                LOG.info("Copying empty file " + sourceFile);
                if (!destFile.exists()) {
                    copyFile(sourceFile, destFile);
                }
                continue;
            }

            String fileType = fileObj.getType();
            if (!isCerner22File(fileType)) {
                //the 2.1 files are going to be a pain to split by patient, so just copy them over
                LOG.info("Copying 2.1 file " + sourceFile);
                if (!destFile.exists()) {
                    copyFile(sourceFile, destFile);
                }
                continue;
            }

            LOG.info("Checking 2.2 file " + sourceFile);
            if (destFile.exists()) {
                destFile.delete();
            }

            filterCerner22File(sourceFile, destFile, fileType, personIds);
        }
    }
}

/**
 * Filters one pipe-delimited Cerner 2.2 file into destFile, keeping header rows,
 * rows with an empty person ID, and rows whose person ID is in personIds. If no
 * person ID column can be found, the whole file is copied verbatim instead.
 */
private static void filterCerner22File(File sourceFile, File destFile, String fileType, Set<String> personIds) throws Exception {

    //NOTE(review): uses the platform default charset, as the original did — confirm extract encoding
    BufferedReader br = new BufferedReader(new FileReader(sourceFile));
    PrintWriter pw = null;
    try {
        int lineIndex = -1;
        int personIdColIndex = -1;
        int expectedCols = -1;

        while (true) {
            String line = br.readLine();
            if (line == null) {
                break;
            }
            lineIndex++;

            if (lineIndex == 0) {
                if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
                    //this file has no headers, so the person ID column is hard-coded; the first
                    //line is DATA, so it falls through to be filtered like any other row
                    //(previously it was always written out, bypassing the filter)
                    personIdColIndex = 5;
                    pw = new PrintWriter(new BufferedWriter(new PrintWriter(destFile)));

                } else {
                    //check headings for the person ID column
                    String[] toks = line.split("\\|", -1);
                    expectedCols = toks.length;
                    for (int i = 0; i < expectedCols; i++) {
                        String col = toks[i];
                        if (col.equalsIgnoreCase("PERSON_ID")
                                || col.equalsIgnoreCase("#PERSON_ID")) {
                            personIdColIndex = i;
                            break;
                        }
                    }

                    //if no person ID column, then just copy the entire file
                    if (personIdColIndex == -1) {
                        br.close(); //release the source before copying (close is idempotent, so the finally re-close is safe)
                        LOG.info("    Copying 2.2 file to " + destFile);
                        copyFile(sourceFile, destFile);
                        return;
                    }

                    LOG.info("    Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
                    pw = new PrintWriter(new BufferedWriter(new PrintWriter(destFile)));
                    pw.println(line); //header row is always carried over
                    continue;
                }
            }

            //filter on person ID
            String[] toks = line.split("\\|", -1);
            if (expectedCols != -1
                    && toks.length != expectedCols) {
                throw new Exception("Line " + (lineIndex + 1) + " has " + toks.length + " cols but expecting " + expectedCols);
            }

            String personId = toks[personIdColIndex];
            if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
                    && !personIds.contains(personId)) {
                continue;
            }

            pw.println(line);
        }

    } finally {
        //previously these leaked if any exception was thrown mid-file
        br.close();
        if (pw != null) {
            pw.flush();
            pw.close();
        }
    }
}
/**
 * Copies src to dst. Like the previous stream-based version, this fails with
 * FileAlreadyExistsException if dst already exists (callers check destFile.exists() first).
 *
 * @throws Exception on any I/O failure
 */
private static void copyFile(File src, File dst) throws Exception {
    //Files.copy(Path, Path) streams the file itself and, unlike the previous manual
    //FileInputStream wrapping, cannot leak an open stream when the copy fails
    Files.copy(src.toPath(), dst.toPath());
}
/**
 * Returns true if the given file type belongs to the Cerner 2.2 extract format
 * (case-insensitive); anything else is treated as a 2.1 file by callers.
 *
 * @param fileType file type token to classify
 * @throws Exception never thrown; declared for compatibility with existing callers
 */
private static boolean isCerner22File(String fileType) throws Exception {
    //set lookup replaces the previous 58-branch equalsIgnoreCase chain
    Set<String> cerner22Types = new HashSet<>(java.util.Arrays.asList(
            "PPATI", "PPREL", "CDSEV", "PPATH", "RTTPE", "AEATT", "AEINV", "AETRE",
            "OPREF", "OPATT", "EALEN", "EALSU", "EALOF", "HPSSP", "IPEPI", "IPWDS",
            "DELIV", "BIRTH", "SCHAC", "APPSL", "DIAGN", "PROCE", "ORDER", "DOCRP",
            "DOCREF", "CNTRQ", "LETRS", "LOREF", "ORGREF", "PRSNLREF", "CVREF", "NOMREF",
            "EALIP", "CLEVE", "ENCNT", "RESREF", "PPNAM", "PPADD", "PPPHO", "PPALI",
            "PPINF", "PPAGP", "SURCC", "SURCP", "SURCA", "SURCD", "PDRES", "PDREF",
            "ABREF", "CEPRS", "ORDDT", "STATREF", "STATA", "ENCINF", "SCHDETAIL",
            "SCHOFFER", "PPGPORG", "FAMILYHISTORY"));

    //Locale.ROOT keeps the uppercasing locale-independent, matching equalsIgnoreCase for these ASCII tokens
    return cerner22Types.contains(fileType.toUpperCase(java.util.Locale.ROOT));
}
/*private static void fixSubscriberDbs() {
LOG.info("Fixing Subscriber DBs");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-05-11");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
boolean needsFixing = false;
for (UUID exchangeId: exchangeIds) {
if (!needsFixing) {
List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId);
for (ExchangeTransformAudit audit: transformAudits) {
Date transfromStart = audit.getStarted();
if (!transfromStart.before(dateError)) {
needsFixing = true;
break;
}
}
}
if (!needsFixing) {
continue;
}
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId);
Exchange exchange = exchangeDal.getExchange(exchangeId);
LOG.info(" Posting exchange " + exchangeId + " with " + batches.size() + " batches");
List<UUID> batchIds = new ArrayList<>();
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
UUID batchId = batch.getBatchId();
batchIds.add(batchId);
}
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
}
LOG.info("Finished Fixing Subscriber DBs");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
*//*if (!referral.hasServiceRequested()) {
referral.getServiceRequested().clear();*//*
/*private static void fixReferralRequests() {
LOG.info("Fixing Referral Requests");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-04-24");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
boolean needsFixing = false;
Set<UUID> patientIdsToPost = new HashSet<>();
for (UUID exchangeId: exchangeIds) {
if (!needsFixing) {
List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId);
for (ExchangeTransformAudit audit: transformAudits) {
Date transfromStart = audit.getStarted();
if (!transfromStart.before(dateError)) {
needsFixing = true;
break;
}
}
}
if (!needsFixing) {
continue;
}
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId);
Exchange exchange = exchangeDal.getExchange(exchangeId);
LOG.info("Checking exchange " + exchangeId + " with " + batches.size() + " batches");
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
UUID batchId = batch.getBatchId();
List<ResourceWrapper> wrappers = resourceDal.getResourcesForBatch(serviceId, batchId);
for (ResourceWrapper wrapper: wrappers) {
String resourceType = wrapper.getResourceType();
if (!resourceType.equals(ResourceType.ReferralRequest.toString())
|| wrapper.isDeleted()) {
continue;
}
String json = wrapper.getResourceData();
ReferralRequest referral = (ReferralRequest)FhirSerializationHelper.deserializeResource(json);
continue;
}
CodeableConcept reason = referral.getServiceRequested().get(0);
referral.setReason(reason);
if (!referral.hasReason()) {
continue;
}
CodeableConcept reason = referral.getReason();
referral.setReason(null);
referral.addServiceRequested(reason);
json = FhirSerializationHelper.serializeResource(referral);
wrapper.setResourceData(json);
saveResourceWrapper(serviceId, wrapper);
//add to the set of patients we know need sending on to the protocol queue
patientIdsToPost.add(patientId);
LOG.info("Fixed " + resourceType + " " + wrapper.getResourceId() + " in batch " + batchId);
}
//if our patient has just been fixed or was fixed before, post onto the protocol queue
if (patientIdsToPost.contains(patientId)) {
List<UUID> batchIds = new ArrayList<>();
batchIds.add(batchId);
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
}
}
}
LOG.info("Finished Fixing Referral Requests");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * One-off maintenance routine that applies the Emis admin resource cache for every Emis
 * service that has started processing but has not yet had the cache applied.
 * <p>
 * For each qualifying service/endpoint it runs the cached admin resources through a
 * FhirResourceFiler against the service's FIRST exchange, writes a transform audit,
 * records an exchange event so re-runs are idempotent, and posts the resulting single
 * batch onto the EdsProtocol queue. Any failure is logged and swallowed so the routine
 * can run stand-alone over all services.
 */
private static void applyEmisAdminCaches() {
LOG.info("Applying Emis Admin Caches");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
//the two Emis system UUIDs (live and dev) we care about
//NOTE(review): hard-coded environment-specific UUIDs - confirm they match the target deployment
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
//services with no endpoints configured can't be Emis, so skip
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
//only process endpoints belonging to one of the two Emis systems
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
//no point applying the cache before the service has started receiving data
if (!exchangeDal.isServiceStarted(serviceId, endpointSystemId)) {
LOG.info(" Service not started, so skipping");
continue;
}
//get exchanges
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
if (exchangeIds.isEmpty()) {
LOG.info(" No exchanges found, so skipping");
continue;
}
//the admin cache is applied against the FIRST exchange only; check its events to
//see if a previous run already did the work (makes this routine idempotent)
UUID firstExchangeId = exchangeIds.get(0);
List<ExchangeEvent> events = exchangeDal.getExchangeEvents(firstExchangeId);
boolean appliedAdminCache = false;
for (ExchangeEvent event: events) {
if (event.getEventDesc().equals("Applied Emis Admin Resource Cache")) {
appliedAdminCache = true;
}
}
if (appliedAdminCache) {
LOG.info(" Have already applied admin cache, so skipping");
continue;
}
Exchange exchange = exchangeDal.getExchange(firstExchangeId);
String body = exchange.getBody();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(body);
if (files.length == 0) {
LOG.info(" No files in exchange " + firstExchangeId + " so skipping");
continue;
}
//the sharing agreement GUID is the fifth underscore-delimited token of the first file's base name
String firstFilePath = files[0];
String name = FilenameUtils.getBaseName(firstFilePath); //file name without extension
String[] toks = name.split("_");
if (toks.length != 5) {
throw new TransformException("Failed to extract data sharing agreement GUID from filename " + firstFilePath);
}
String sharingAgreementGuid = toks[4];
//set up the filer and helper; batchIds is populated by the filer as batches are created
List<UUID> batchIds = new ArrayList<>();
TransformError transformError = new TransformError();
FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(firstExchangeId, serviceId, endpointSystemId, transformError, batchIds);
EmisCsvHelper csvHelper = new EmisCsvHelper(fhirResourceFiler.getServiceId(), fhirResourceFiler.getSystemId(),
fhirResourceFiler.getExchangeId(), sharingAgreementGuid,
true);
//audit the transform in the same way a normal inbound transform would be audited
ExchangeTransformAudit transformAudit = new ExchangeTransformAudit();
transformAudit.setServiceId(serviceId);
transformAudit.setSystemId(endpointSystemId);
transformAudit.setExchangeId(firstExchangeId);
transformAudit.setId(UUID.randomUUID());
transformAudit.setStarted(new Date());
LOG.info(" Going to apply admin resource cache");
csvHelper.applyAdminResourceCache(fhirResourceFiler);
fhirResourceFiler.waitToFinish();
for (UUID batchId: batchIds) {
LOG.info(" Created batch ID " + batchId + " for exchange " + firstExchangeId);
}
transformAudit.setEnded(new Date());
transformAudit.setNumberBatchesCreated(new Integer(batchIds.size()));
boolean hadError = false;
if (transformError.getError().size() > 0) {
transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError));
hadError = true;
}
exchangeDal.save(transformAudit);
//clear down the cache of reference mappings since they won't be of much use for the next Exchange
IdHelper.clearCache();
//on error we deliberately do NOT write the "applied" event, so a re-run will retry this service
if (hadError) {
LOG.error(" <<<<<<Error applying resource cache!");
continue;
}
//add the event to say we've applied the cache
AuditWriter.writeExchangeEvent(firstExchangeId, "Applied Emis Admin Resource Cache");
//post that ONE new batch ID onto the protocol queue
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
LOG.info("Finished Applying Emis Admin Caches");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
* fixes Emis extract(s) when a practice was disabled then subsequently re-bulked, by
* replacing the "delete" extracts with newly generated deltas that can be processed
* before the re-bulk is done
*/
private static void fixDisabledEmisExtract(String serviceOdsCode, String systemId, String sharedStoragePath, String tempDirParent) {
LOG.info("Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceOdsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(serviceOdsCode);
LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId());
/*File tempDirLast = new File(tempDir, "last");
if (!tempDirLast.exists()) {
if (!tempDirLast.mkdirs()) {
throw new Exception("Failed to create temp dir " + tempDirLast);
}
tempDirLast.mkdirs();
}
File tempDirEmpty = new File(tempDir, "empty");
if (!tempDirEmpty.exists()) {
if (!tempDirEmpty.mkdirs()) {
throw new Exception("Failed to create temp dir " + tempDirEmpty);
}
tempDirEmpty.mkdirs();
}*/
String tempDir = FilenameUtils.concat(tempDirParent, serviceOdsCode);
File f = new File(tempDir);
if (f.exists()) {
FileUtils.deleteDirectory(f);
}
UUID serviceUuid = service.getId();
UUID systemUuid = UUID.fromString(systemId);
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
//get all the exchanges, which are returned in reverse order, so reverse for simplicity
List<Exchange> exchangesDesc = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);
Map<Exchange, List<String>> hmExchangeFiles = new HashMap<>();
Map<Exchange, List<String>> hmExchangeFilesWithoutStoragePrefix = new HashMap<>();
//reverse the exchange list and cache the files for each one
List<Exchange> exchanges = new ArrayList<>();
for (int i=exchangesDesc.size()-1; i>=0; i
Exchange exchange = exchangesDesc.get(i);
String exchangeBody = exchange.getBody();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
//drop out and ignore any exchanges containing the singular bespoke reg status files
if (files.length <= 1) {
continue;
}
//drop out and ignore any exchanges for the left and dead extracts, since we don't
//expect to receive re-bulked data for the dead patients
String firstFile = files[0];
if (firstFile.indexOf("LEFT_AND_DEAD") > -1) {
continue;
}
exchanges.add(exchange);
//populate the map of the files with the shared storage prefix
List<String> fileList = Lists.newArrayList(files);
hmExchangeFiles.put(exchange, fileList);
//populate a map of the same files without the prefix
files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
for (int j=0; j<files.length; j++) {
String file = files[j].substring(sharedStoragePath.length() + 1);
files[j] = file;
}
fileList = Lists.newArrayList(files);
hmExchangeFilesWithoutStoragePrefix.put(exchange, fileList);
}
/*exchanges.sort((o1, o2) -> {
Date d1 = o1.getTimestamp();
Date d2 = o2.getTimestamp();
return d1.compareTo(d2);
});*/
LOG.info("Found " + exchanges.size() + " exchanges and cached their files");
int indexDisabled = -1;
int indexRebulked = -1;
int indexOriginallyBulked = -1;
//go back through them to find the extract where the re-bulk is and when it was disabled
for (int i=exchanges.size()-1; i>=0; i
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
boolean disabled = isDisabledInSharingAgreementFile(files);
if (disabled) {
indexDisabled = i;
} else {
if (indexDisabled == -1) {
indexRebulked = i;
} else {
//if we've found a non-disabled extract older than the disabled ones,
//then we've gone far enough back
break;
}
}
}
//go back from when disabled to find the previous bulk load (i.e. the first one or one after it was previously not disabled)
for (int i=indexDisabled-1; i>=0; i
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
boolean disabled = isDisabledInSharingAgreementFile(files);
if (disabled) {
break;
}
indexOriginallyBulked = i;
}
if (indexOriginallyBulked > -1) {
Exchange exchangeOriginallyBulked = exchanges.get(indexOriginallyBulked);
LOG.info("Originally bulked on " + findExtractDate(exchangeOriginallyBulked, hmExchangeFiles) + " " + exchangeOriginallyBulked.getId());
}
if (indexDisabled > -1) {
Exchange exchangeDisabled = exchanges.get(indexDisabled);
LOG.info("Disabled on " + findExtractDate(exchangeDisabled, hmExchangeFiles) + " " + exchangeDisabled.getId());
}
if (indexRebulked > -1) {
Exchange exchangeRebulked = exchanges.get(indexRebulked);
LOG.info("Rebulked on " + findExtractDate(exchangeRebulked, hmExchangeFiles) + " " + exchangeRebulked.getId());
}
if (indexDisabled == -1
|| indexRebulked == -1
|| indexOriginallyBulked == -1) {
throw new Exception("Failed to find exchanges for original bulk (" + indexOriginallyBulked + ") disabling (" + indexDisabled + ") or re-bulking (" + indexRebulked + ")");
}
//continueOrQuit();
Exchange exchangeRebulked = exchanges.get(indexRebulked);
List<String> rebulkFiles = hmExchangeFiles.get(exchangeRebulked);
List<String> tempFilesCreated = new ArrayList<>();
Set<String> patientGuidsDeletedOrTooOld = new HashSet<>();
for (String rebulkFile: rebulkFiles) {
String fileType = findFileType(rebulkFile);
if (!isPatientFile(fileType)) {
continue;
}
LOG.info("Doing " + fileType);
String guidColumnName = getGuidColumnName(fileType);
//find all the guids in the re-bulk
Set<String> idsInRebulk = new HashSet<>();
InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(rebulkFile);
CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
String[] headers = null;
try {
headers = CsvHelper.getHeaderMapAsArray(csvParser);
Iterator<CSVRecord> iterator = csvParser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
//get the patient and row guid out of the file and cache in our set
String id = record.get("PatientGuid");
if (!Strings.isNullOrEmpty(guidColumnName)) {
id += "//" + record.get(guidColumnName);
}
idsInRebulk.add(id);
}
} finally {
csvParser.close();
}
LOG.info("Found " + idsInRebulk.size() + " IDs in re-bulk file: " + rebulkFile);
//create a replacement file for the exchange the service was disabled
String replacementDisabledFile = null;
Exchange exchangeDisabled = exchanges.get(indexDisabled);
List<String> disabledFiles = hmExchangeFilesWithoutStoragePrefix.get(exchangeDisabled);
for (String s: disabledFiles) {
String disabledFileType = findFileType(s);
if (disabledFileType.equals(fileType)) {
replacementDisabledFile = FilenameUtils.concat(tempDir, s);
File dir = new File(replacementDisabledFile).getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
tempFilesCreated.add(s);
LOG.info("Created replacement file " + replacementDisabledFile);
}
}
FileWriter fileWriter = new FileWriter(replacementDisabledFile);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers));
csvPrinter.flush();
Set<String> pastIdsProcessed = new HashSet<>();
//now go through all files of the same type PRIOR to the service was disabled
//to find any rows that we'll need to explicitly delete because they were deleted while
//the extract was disabled
for (int i=indexDisabled-1; i>=indexOriginallyBulked; i
Exchange exchange = exchanges.get(i);
String originalFile = null;
List<String> files = hmExchangeFiles.get(exchange);
for (String s: files) {
String originalFileType = findFileType(s);
if (originalFileType.equals(fileType)) {
originalFile = s;
break;
}
}
if (originalFile == null) {
continue;
}
LOG.info(" Reading " + originalFile);
reader = FileHelper.readFileReaderFromSharedStorage(originalFile);
csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
try {
Iterator<CSVRecord> iterator = csvParser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String patientGuid = record.get("PatientGuid");
//get the patient and row guid out of the file and cache in our set
String uniqueId = patientGuid;
if (!Strings.isNullOrEmpty(guidColumnName)) {
uniqueId += "//" + record.get(guidColumnName);
}
//if we're already handled this record in a more recent extract, then skip it
if (pastIdsProcessed.contains(uniqueId)) {
continue;
}
pastIdsProcessed.add(uniqueId);
//if this ID isn't deleted and isn't in the re-bulk then it means
//it WAS deleted in Emis Web but we didn't receive the delete, because it was deleted
//from Emis Web while the extract feed was disabled
//if the record is deleted, then we won't expect it in the re-bulk
boolean deleted = Boolean.parseBoolean(record.get("Deleted"));
if (deleted) {
//if it's the Patient file, stick the patient GUID in a set so we know full patient record deletes
if (fileType.equals("Admin_Patient")) {
patientGuidsDeletedOrTooOld.add(patientGuid);
}
continue;
}
//if it's not the patient file and we refer to a patient that we know
//has been deleted, then skip this row, since we know we're deleting the entire patient record
if (patientGuidsDeletedOrTooOld.contains(patientGuid)) {
continue;
}
//if the re-bulk contains a record matching this one, then it's OK
if (idsInRebulk.contains(uniqueId)) {
continue;
}
//the rebulk won't contain any data for patients that are now too old (i.e. deducted or deceased > 2 yrs ago),
//so any patient ID in the original files but not in the rebulk can be treated like this and any data for them can be skipped
if (fileType.equals("Admin_Patient")) {
//retrieve the Patient and EpisodeOfCare resource for the patient so we can confirm they are deceased or deducted
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID patientUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.Patient, patientGuid);
if (patientUuid == null) {
throw new Exception("Failed to find patient UUID from GUID [" + patientGuid + "]");
}
Patient patientResource = (Patient)resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.Patient, patientUuid.toString());
if (patientResource.hasDeceased()) {
patientGuidsDeletedOrTooOld.add(patientGuid);
continue;
}
UUID episodeUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.EpisodeOfCare, patientGuid); //we use the patient GUID for the episode too
EpisodeOfCare episodeResource = (EpisodeOfCare)resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.EpisodeOfCare, episodeUuid.toString());
if (episodeResource.hasPeriod()
&& !PeriodHelper.isActive(episodeResource.getPeriod())) {
patientGuidsDeletedOrTooOld.add(patientGuid);
continue;
}
}
//create a new CSV record, carrying over the GUIDs from the original but marking as deleted
String[] newRecord = new String[headers.length];
for (int j=0; j<newRecord.length; j++) {
String header = headers[j];
if (header.equals("PatientGuid")
|| header.equals("OrganisationGuid")
|| (!Strings.isNullOrEmpty(guidColumnName)
&& header.equals(guidColumnName))) {
String val = record.get(header);
newRecord[j] = val;
} else if (header.equals("Deleted")) {
newRecord[j] = "true";
} else {
newRecord[j] = "";
}
}
csvPrinter.printRecord((Object[])newRecord);
csvPrinter.flush();
//log out the raw record that's missing from the original
StringBuffer sb = new StringBuffer();
sb.append("Record not in re-bulk: ");
for (int j=0; j<record.size(); j++) {
if (j > 0) {
sb.append(",");
}
sb.append(record.get(j));
}
LOG.info(sb.toString());
}
} finally {
csvParser.close();
}
}
csvPrinter.flush();
csvPrinter.close();
//also create a version of the CSV file with just the header and nothing else in
for (int i=indexDisabled+1; i<indexRebulked; i++) {
Exchange ex = exchanges.get(i);
List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex);
for (String s: exchangeFiles) {
String exchangeFileType = findFileType(s);
if (exchangeFileType.equals(fileType)) {
String emptyTempFile = FilenameUtils.concat(tempDir, s);
File dir = new File(emptyTempFile).getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
fileWriter = new FileWriter(emptyTempFile);
bufferedWriter = new BufferedWriter(fileWriter);
csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers));
csvPrinter.flush();
csvPrinter.close();
tempFilesCreated.add(s);
LOG.info("Created empty file " + emptyTempFile);
}
}
}
}
//we also need to copy the restored sharing agreement file to replace all the period it was disabled
String rebulkedSharingAgreementFile = null;
for (String s: rebulkFiles) {
String fileType = findFileType(s);
if (fileType.equals("Agreements_SharingOrganisation")) {
rebulkedSharingAgreementFile = s;
}
}
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange ex = exchanges.get(i);
List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex);
for (String s: exchangeFiles) {
String exchangeFileType = findFileType(s);
if (exchangeFileType.equals("Agreements_SharingOrganisation")) {
String replacementFile = FilenameUtils.concat(tempDir, s);
InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkedSharingAgreementFile);
File replacementFileObj = new File(replacementFile);
Files.copy(inputStream, replacementFileObj.toPath());
inputStream.close();
tempFilesCreated.add(s);
}
}
}
//create a script to copy the files into S3
List<String> copyScript = new ArrayList<>();
copyScript.add("#!/bin/bash");
copyScript.add("");
for (String s: tempFilesCreated) {
String localFile = FilenameUtils.concat(tempDir, s);
copyScript.add("sudo aws s3 cp " + localFile + " s3://discoverysftplanding/endeavour/" + s);
}
String scriptFile = FilenameUtils.concat(tempDir, "copy.sh");
FileUtils.writeLines(new File(scriptFile), copyScript);
LOG.info("Finished - written files to " + tempDir);
dumpFileSizes(new File(tempDir));
/*continueOrQuit();
//back up every file where the service was disabled
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
for (String file: files) {
//first download from S3 to the local temp dir
InputStream inputStream = FileHelper.readFileFromSharedStorage(file);
String fileName = FilenameUtils.getName(file);
String tempPath = FilenameUtils.concat(tempDir, fileName);
File downloadDestination = new File(tempPath);
Files.copy(inputStream, downloadDestination.toPath());
//then write back to S3 in a sub-dir of the original file
String backupPath = FilenameUtils.getPath(file);
backupPath = FilenameUtils.concat(backupPath, "Original");
backupPath = FilenameUtils.concat(backupPath, fileName);
FileHelper.writeFileToSharedStorage(backupPath, downloadDestination);
LOG.info("Backed up " + file + " -> " + backupPath);
//delete from temp dir
downloadDestination.delete();
}
}
continueOrQuit();
//copy the new CSV files into the dir where it was disabled
List<String> disabledFiles = hmExchangeFiles.get(exchangeDisabled);
for (String disabledFile: disabledFiles) {
String fileType = findFileType(disabledFile);
if (!isPatientFile(fileType)) {
continue;
}
String tempFile = FilenameUtils.concat(tempDirLast.getAbsolutePath(), fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected temp file " + f);
}
FileHelper.writeFileToSharedStorage(disabledFile, f);
LOG.info("Copied " + tempFile + " -> " + disabledFile);
}
continueOrQuit();
//empty the patient files for any extracts while the service was disabled
for (int i=indexDisabled+1; i<indexRebulked; i++) {
Exchange otherExchangeDisabled = exchanges.get(i);
List<String> otherDisabledFiles = hmExchangeFiles.get(otherExchangeDisabled);
for (String otherDisabledFile: otherDisabledFiles) {
String fileType = findFileType(otherDisabledFile);
if (!isPatientFile(fileType)) {
continue;
}
String tempFile = FilenameUtils.concat(tempDirEmpty.getAbsolutePath(), fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected empty file " + f);
}
FileHelper.writeFileToSharedStorage(otherDisabledFile, f);
LOG.info("Copied " + tempFile + " -> " + otherDisabledFile);
}
}
continueOrQuit();
//copy the content of the sharing agreement file from when it was re-bulked
for (String rebulkFile: rebulkFiles) {
String fileType = findFileType(rebulkFile);
if (fileType.equals("Agreements_SharingOrganisation")) {
String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv");
File downloadDestination = new File(tempFile);
InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkFile);
Files.copy(inputStream, downloadDestination.toPath());
tempFilesCreated.add(tempFile);
}
}
//replace the sharing agreement file for all disabled extracts with the non-disabled one
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
for (String file: files) {
String fileType = findFileType(file);
if (fileType.equals("Agreements_SharingOrganisation")) {
String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected empty file " + f);
}
FileHelper.writeFileToSharedStorage(file, f);
LOG.info("Copied " + tempFile + " -> " + file);
}
}
}
LOG.info("Finished Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId);
continueOrQuit();
for (String tempFileCreated: tempFilesCreated) {
File f = new File(tempFileCreated);
if (f.exists()) {
f.delete();
}
}*/
} catch (Exception ex) {
LOG.error("", ex);
}
}
/**
 * Recursively logs the size of every regular file under the given file or directory.
 * Fix: {@link File#listFiles()} can return null (I/O error, or the path stops being a
 * directory between the check and the call) - guard against that rather than risking
 * a NullPointerException mid-recursion.
 */
private static void dumpFileSizes(File f) {
    if (f.isDirectory()) {
        File[] children = f.listFiles();
        if (children != null) {
            for (File child: children) {
                dumpFileSizes(child);
            }
        }
    } else {
        //human-readable size, e.g. "10 KB"
        String totalSizeReadable = FileUtils.byteCountToDisplaySize(f.length());
        LOG.info("" + f + " = " + totalSizeReadable);
    }
}
/**
 * Returns the extract date of an exchange, taken from the fourth underscore-delimited
 * token of the sharing agreement file's base name.
 */
private static String findExtractDate(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception {
    List<String> exchangeFiles = fileMap.get(exchange);
    String sharingAgreementFile = findSharingAgreementFile(exchangeFiles);
    String baseName = FilenameUtils.getBaseName(sharingAgreementFile);
    String[] tokens = baseName.split("_");
    return tokens[3];
}
/**
 * Reads the sharing agreement file from the given exchange file list and returns the
 * value of its "Disabled" column (taken from the first record only).
 */
private static boolean isDisabledInSharingAgreementFile(List<String> files) throws Exception {
    String sharingAgreementFile = findSharingAgreementFile(files);
    InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(sharingAgreementFile);
    CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
    try {
        //the sharing agreement file has a single data row, so only the first record matters
        CSVRecord firstRecord = csvParser.iterator().next();
        String disabledStr = firstRecord.get("Disabled");
        return Boolean.parseBoolean(disabledStr);
    } finally {
        csvParser.close();
    }
}
/**
 * Interactive safety gate: prompts on the log, reads a single line of console input,
 * and terminates the JVM (exit code 1) unless the first character is 'y' or 'Y'.
 */
private static void continueOrQuit() throws Exception {
    LOG.info("Enter y to continue, anything else to quit");

    byte[] buffer = new byte[10];
    System.in.read(buffer);

    char entered = (char)buffer[0];
    if (entered == 'y' || entered == 'Y') {
        return;
    }
    System.out.println("Read " + entered);
    System.exit(1);
}
/**
 * Returns the name of the row-level GUID column for the given Emis file type, or null
 * for the patient file (which has only the patient GUID).
 *
 * @throws IllegalArgumentException if the file type is not a known patient-data file
 */
private static String getGuidColumnName(String fileType) {
    switch (fileType) {
        case "Admin_Patient":
            //patient file just has patient GUID, nothing extra
            return null;
        case "CareRecord_Consultation":
            return "ConsultationGuid";
        case "CareRecord_Diary":
            return "DiaryGuid";
        case "CareRecord_Observation":
            return "ObservationGuid";
        case "CareRecord_Problem":
            //there is no separate problem GUID, as it's just a modified observation
            return "ObservationGuid";
        case "Prescribing_DrugRecord":
            return "DrugRecordGuid";
        case "Prescribing_IssueRecord":
            return "IssueRecordGuid";
        default:
            throw new IllegalArgumentException(fileType);
    }
}
/**
 * Derives the Emis file type ("Domain_Name", e.g. "Admin_Patient") from a file path,
 * using the second and third underscore-delimited tokens of the file name.
 */
private static String findFileType(String filePath) {
    String fileName = FilenameUtils.getName(filePath);
    String[] tokens = fileName.split("_");
    //token 0 is a fixed prefix; tokens 1 and 2 are the domain and name
    return tokens[1] + "_" + tokens[2];
}
/**
 * Returns true if the given Emis file type contains patient-level data with a
 * "Deleted" column that this fix-up code can process.
 */
private static boolean isPatientFile(String fileType) {
    //note the referral file doesn't have a Deleted column, so isn't in this list
    switch (fileType) {
        case "Admin_Patient":
        case "CareRecord_Consultation":
        case "CareRecord_Diary":
        case "CareRecord_Observation":
        case "CareRecord_Problem":
        case "Prescribing_DrugRecord":
        case "Prescribing_IssueRecord":
            return true;
        default:
            return false;
    }
}
/**
 * Finds the sharing agreement ("Agreements_SharingOrganisation") file in the given list.
 *
 * @throws Exception if no file of that type is present
 */
private static String findSharingAgreementFile(List<String> files) throws Exception {
    for (String file : files) {
        if (findFileType(file).equals("Agreements_SharingOrganisation")) {
            return file;
        }
    }
    throw new Exception("Failed to find sharing agreement file in " + files.get(0));
}
/**
 * Sends a test message to the queue-reader alerts Slack channel to verify connectivity;
 * failures are logged and swallowed.
 */
private static void testSlack() {
    LOG.info("Testing slack");
    try {
        String message = "Test Message from Queue Reader";
        SlackHelper.sendSlackMessage(SlackHelper.Channel.QueueReaderAlerts, message);
        LOG.info("Finished testing slack");
    } catch (Exception ex) {
        LOG.error("", ex);
    }
}
/*private static void postToInboundFromFile(UUID serviceId, UUID systemId, String filePath) {
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
Service service = serviceDalI.getById(serviceId);
LOG.info("Posting to inbound exchange for " + service.getName() + " from file " + filePath);
FileReader fr = new FileReader(filePath);
BufferedReader br = new BufferedReader(fr);
int count = 0;
List<UUID> exchangeIdBatch = new ArrayList<>();
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
UUID exchangeId = UUID.fromString(line);
//update the transform audit, so EDS UI knows we've re-queued this exchange
ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
if (audit != null
&& !audit.isResubmitted()) {
audit.setResubmitted(true);
auditRepository.save(audit);
}
count ++;
exchangeIdBatch.add(exchangeId);
if (exchangeIdBatch.size() >= 1000) {
QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
exchangeIdBatch = new ArrayList<>();
LOG.info("Done " + count);
}
}
if (!exchangeIdBatch.isEmpty()) {
QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
LOG.info("Done " + count);
}
br.close();
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Posting to inbound for " + serviceId);
}*/
/*private static void postToInbound(UUID serviceId, boolean all) {
LOG.info("Posting to inbound for " + serviceId);
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
Service service = serviceDalI.getById(serviceId);
List<UUID> systemIds = findSystemIds(service);
UUID systemId = systemIds.get(0);
ExchangeTransformErrorState errorState = auditRepository.getErrorState(serviceId, systemId);
for (UUID exchangeId: errorState.getExchangeIdsInError()) {
//update the transform audit, so EDS UI knows we've re-queued this exchange
ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
//skip any exchange IDs we've already re-queued up to be processed again
if (audit.isResubmitted()) {
LOG.debug("Not re-posting " + audit.getExchangeId() + " as it's already been resubmitted");
continue;
}
LOG.debug("Re-posting " + audit.getExchangeId());
audit.setResubmitted(true);
auditRepository.save(audit);
//then re-submit the exchange to Rabbit MQ for the queue reader to pick up
QueueHelper.postToExchange(exchangeId, "EdsInbound", null, false);
if (!all) {
LOG.info("Posted first exchange, so stopping");
break;
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Posting to inbound for " + serviceId);
}*/
/**
 * Runs {@link #fixPatientSearch(String, String)} for every known service, optionally
 * restricted to the given system ID; errors are logged and swallowed.
 */
private static void fixPatientSearchAllServices(String filterSystemId) {
    LOG.info("Fixing patient search for all services and system " + filterSystemId);
    try {
        ServiceDalI serviceDal = DalProvider.factoryServiceDal();
        for (Service service: serviceDal.getAll()) {
            fixPatientSearch(service.getId().toString(), filterSystemId);
        }
        LOG.info("Finished Fixing patient search for all services and system " + filterSystemId);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Rebuilds the patient_search data for one service by walking every exchange batch,
 * loading the current Patient resource for each distinct patient, and pushing it
 * through the patient search DAL's update.
 *
 * @param serviceId      service UUID as a string
 * @param filterSystemId optional system UUID (as a string) to restrict to; null/empty means all systems
 */
private static void fixPatientSearch(String serviceId, String filterSystemId) {
LOG.info("Fixing patient search for service " + serviceId);
try {
UUID serviceUuid = UUID.fromString(serviceId);
//an empty/null filter means process every system for the service
UUID filterSystemUuid = null;
if (!Strings.isNullOrEmpty(filterSystemId)) {
filterSystemUuid = UUID.fromString(filterSystemId);
}
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
//tracks patients already updated, so each is processed once across all exchanges
Set<UUID> patientsDone = new HashSet<>();
Service service = serviceDal.getById(serviceUuid);
List<UUID> systemIds = findSystemIds(service);
for (UUID systemId: systemIds) {
if (filterSystemUuid != null
&& !filterSystemUuid.equals(systemId)) {
continue;
}
List<UUID> exchanges = exchangeDalI.getExchangeIdsForService(serviceUuid, systemId);
LOG.info("Found " + exchanges.size() + " exchanges for system " + systemId);
for (UUID exchangeId : exchanges) {
List<ExchangeBatch> batches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
LOG.info("Found " + batches.size() + " batches in exchange " + exchangeId);
for (ExchangeBatch batch : batches) {
//batches without a patient ID are admin-level batches, so skip
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
if (patientsDone.contains(patientId)) {
continue;
}
patientsDone.add(patientId);
//load the current Patient resource and push it through the patient search update;
//a null wrapper or empty JSON means the patient was deleted, so there's nothing to update
ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceUuid, ResourceType.Patient.toString(), patientId);
if (wrapper != null) {
String json = wrapper.getResourceData();
if (!Strings.isNullOrEmpty(json)) {
Patient fhirPatient = (Patient)FhirSerializationHelper.deserializeResource(json);
patientSearchDal.update(serviceUuid, fhirPatient);
}
}
//progress logging every 1000 patients
if (patientsDone.size() % 1000 == 0) {
LOG.info("Done " + patientsDone.size());
}
}
}
}
LOG.info("Done " + patientsDone.size());
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished fixing patient search for " + serviceId);
}
private static void runSql(String host, String username, String password, String sqlFile) {
LOG.info("Running SQL on " + host + " from " + sqlFile);
Connection conn = null;
Statement statement = null;
try {
File f = new File(sqlFile);
if (!f.exists()) {
LOG.error("" + f + " doesn't exist");
return;
}
List<String> lines = FileUtils.readLines(f);
/*String combined = String.join("\n", lines);
LOG.info("Going to run SQL");
LOG.info(combined);*/
//load driver
Class.forName("com.mysql.cj.jdbc.Driver");
//create connection
Properties props = new Properties();
props.setProperty("user", username);
props.setProperty("password", password);
conn = DriverManager.getConnection(host, props);
LOG.info("Opened connection");
statement = conn.createStatement();
long totalStart = System.currentTimeMillis();
for (String sql: lines) {
sql = sql.trim();
if (sql.startsWith("
/*private static void fixExchangeBatches() {
LOG.info("Starting Fixing Exchange Batches");
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
List<Service> services = serviceDalI.getAll();
for (Service service: services) {
LOG.info("Doing " + service.getName());
List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(service.getId());
for (UUID exchangeId: exchangeIds) {
LOG.info(" Exchange " + exchangeId);
List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch: exchangeBatches) {
if (exchangeBatch.getEdsPatientId() != null) {
continue;
}
List<ResourceWrapper> resources = resourceDalI.getResourcesForBatch(exchangeBatch.getBatchId());
if (resources.isEmpty()) {
continue;
}
ResourceWrapper first = resources.get(0);
UUID patientId = first.getPatientId();
if (patientId != null) {
exchangeBatch.setEdsPatientId(patientId);
exchangeBatchDalI.save(exchangeBatch);
LOG.info("Fixed batch " + exchangeBatch.getBatchId() + " -> " + exchangeBatch.getEdsPatientId());
}
}
}
}
LOG.info("Finished Fixing Exchange Batches");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void exportHl7Encounters(String sourceCsvPath, String outputPath) {
LOG.info("Exporting HL7 Encounters from " + sourceCsvPath + " to " + outputPath);
try {
File sourceFile = new File(sourceCsvPath);
CSVParser csvParser = CSVParser.parse(sourceFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
//"service_id","system_id","nhs_number","patient_id","count"
int count = 0;
HashMap<UUID, List<UUID>> serviceAndSystemIds = new HashMap<>();
HashMap<UUID, Integer> patientIds = new HashMap<>();
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
count ++;
String serviceId = csvRecord.get("service_id");
String systemId = csvRecord.get("system_id");
String patientId = csvRecord.get("patient_id");
UUID serviceUuid = UUID.fromString(serviceId);
List<UUID> systemIds = serviceAndSystemIds.get(serviceUuid);
if (systemIds == null) {
systemIds = new ArrayList<>();
serviceAndSystemIds.put(serviceUuid, systemIds);
}
systemIds.add(UUID.fromString(systemId));
patientIds.put(UUID.fromString(patientId), new Integer(count));
}
csvParser.close();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ParserPool parser = new ParserPool();
Map<Integer, List<Object[]>> patientRows = new HashMap<>();
SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
for (UUID serviceId: serviceAndSystemIds.keySet()) {
//List<UUID> systemIds = serviceAndSystemIds.get(serviceId);
Service service = serviceDalI.getById(serviceId);
String serviceName = service.getName();
LOG.info("Doing service " + serviceId + " " + serviceName);
List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(serviceId);
LOG.info("Got " + exchangeIds.size() + " exchange IDs to scan");
int exchangeCount = 0;
for (UUID exchangeId: exchangeIds) {
exchangeCount ++;
if (exchangeCount % 1000 == 0) {
LOG.info("Done " + exchangeCount + " exchanges");
}
List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch: exchangeBatches) {
UUID patientId = exchangeBatch.getEdsPatientId();
if (patientId != null
&& !patientIds.containsKey(patientId)) {
continue;
}
Integer patientIdInt = patientIds.get(patientId);
//get encounters for exchange batch
UUID batchId = exchangeBatch.getBatchId();
List<ResourceWrapper> resourceWrappers = resourceDalI.getResourcesForBatch(serviceId, batchId);
for (ResourceWrapper resourceWrapper: resourceWrappers) {
if (resourceWrapper.isDeleted()) {
continue;
}
String resourceType = resourceWrapper.getResourceType();
if (!resourceType.equals(ResourceType.Encounter.toString())) {
continue;
}
LOG.info("Processing " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId());
String json = resourceWrapper.getResourceData();
Encounter fhirEncounter = (Encounter)parser.parse(json);
Date date = null;
if (fhirEncounter.hasPeriod()) {
Period period = fhirEncounter.getPeriod();
if (period.hasStart()) {
date = period.getStart();
}
}
String episodeId = null;
if (fhirEncounter.hasEpisodeOfCare()) {
Reference episodeReference = fhirEncounter.getEpisodeOfCare().get(0);
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(episodeReference);
EpisodeOfCare fhirEpisode = (EpisodeOfCare)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirEpisode != null) {
if (fhirEpisode.hasIdentifier()) {
episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_BARTS_FIN_EPISODE_ID);
if (Strings.isNullOrEmpty(episodeId)) {
episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_HOMERTON_FIN_EPISODE_ID);
}
}
}
}
String adtType = null;
String adtCode = null;
Extension extension = ExtensionConverter.findExtension(fhirEncounter, FhirExtensionUri.HL7_MESSAGE_TYPE);
if (extension != null) {
CodeableConcept codeableConcept = (CodeableConcept) extension.getValue();
Coding hl7MessageTypeCoding = CodeableConceptHelper.findCoding(codeableConcept, FhirUri.CODE_SYSTEM_HL7V2_MESSAGE_TYPE);
if (hl7MessageTypeCoding != null) {
adtType = hl7MessageTypeCoding.getDisplay();
adtCode = hl7MessageTypeCoding.getCode();
}
} else {
//for older formats of the transformed resources, the HL7 message type can only be found from the raw original exchange body
try {
Exchange exchange = exchangeDalI.getExchange(exchangeId);
String exchangeBody = exchange.getBody();
Bundle bundle = (Bundle) FhirResourceHelper.deserialiseResouce(exchangeBody);
for (Bundle.BundleEntryComponent entry: bundle.getEntry()) {
if (entry.getResource() != null
&& entry.getResource() instanceof MessageHeader) {
MessageHeader header = (MessageHeader)entry.getResource();
if (header.hasEvent()) {
Coding coding = header.getEvent();
adtType = coding.getDisplay();
adtCode = coding.getCode();
}
}
}
} catch (Exception ex) {
//if the exchange body isn't a FHIR bundle, then we'll get an error by treating as such, so just ignore them
}
}
String cls = null;
if (fhirEncounter.hasClass_()) {
Encounter.EncounterClass encounterClass = fhirEncounter.getClass_();
if (encounterClass == Encounter.EncounterClass.OTHER
&& fhirEncounter.hasClass_Element()
&& fhirEncounter.getClass_Element().hasExtension()) {
for (Extension classExtension: fhirEncounter.getClass_Element().getExtension()) {
if (classExtension.getUrl().equals(FhirExtensionUri.ENCOUNTER_CLASS)) {
//not 100% of the type of the value, so just append to a String
cls = "" + classExtension.getValue();
}
}
}
if (Strings.isNullOrEmpty(cls)) {
cls = encounterClass.toCode();
}
}
String type = null;
if (fhirEncounter.hasType()) {
//only seem to ever have one type
CodeableConcept codeableConcept = fhirEncounter.getType().get(0);
type = codeableConcept.getText();
}
String status = null;
if (fhirEncounter.hasStatus()) {
Encounter.EncounterState encounterState = fhirEncounter.getStatus();
status = encounterState.toCode();
}
String location = null;
String locationType = null;
if (fhirEncounter.hasLocation()) {
//first location is always the current location
Encounter.EncounterLocationComponent encounterLocation = fhirEncounter.getLocation().get(0);
if (encounterLocation.hasLocation()) {
Reference locationReference = encounterLocation.getLocation();
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(locationReference);
Location fhirLocation = (Location)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirLocation != null) {
if (fhirLocation.hasName()) {
location = fhirLocation.getName();
}
if (fhirLocation.hasType()) {
CodeableConcept typeCodeableConcept = fhirLocation.getType();
if (typeCodeableConcept.hasCoding()) {
Coding coding = typeCodeableConcept.getCoding().get(0);
locationType = coding.getDisplay();
}
}
}
}
}
String clinician = null;
if (fhirEncounter.hasParticipant()) {
//first participant seems to be the interesting one
Encounter.EncounterParticipantComponent encounterParticipant = fhirEncounter.getParticipant().get(0);
if (encounterParticipant.hasIndividual()) {
Reference practitionerReference = encounterParticipant.getIndividual();
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(practitionerReference);
Practitioner fhirPractitioner = (Practitioner)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirPractitioner != null) {
if (fhirPractitioner.hasName()) {
HumanName name = fhirPractitioner.getName();
clinician = name.getText();
if (Strings.isNullOrEmpty(clinician)) {
clinician = "";
for (StringType s: name.getPrefix()) {
clinician += s.getValueNotNull();
clinician += " ";
}
for (StringType s: name.getGiven()) {
clinician += s.getValueNotNull();
clinician += " ";
}
for (StringType s: name.getFamily()) {
clinician += s.getValueNotNull();
clinician += " ";
}
clinician = clinician.trim();
}
}
}
}
}
Object[] row = new Object[12];
row[0] = serviceName;
row[1] = patientIdInt.toString();
row[2] = sdfOutput.format(date);
row[3] = episodeId;
row[4] = adtCode;
row[5] = adtType;
row[6] = cls;
row[7] = type;
row[8] = status;
row[9] = location;
row[10] = locationType;
row[11] = clinician;
List<Object[]> rows = patientRows.get(patientIdInt);
if (rows == null) {
rows = new ArrayList<>();
patientRows.put(patientIdInt, rows);
}
rows.add(row);
}
}
}
}
String[] outputColumnHeaders = new String[] {"Source", "Patient", "Date", "Episode ID", "ADT Message Code", "ADT Message Type", "Class", "Type", "Status", "Location", "Location Type", "Clinician"};
FileWriter fileWriter = new FileWriter(outputPath);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVFormat format = CSVFormat.DEFAULT
.withHeader(outputColumnHeaders)
.withQuote('"');
CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, format);
for (int i=0; i <= count; i++) {
Integer patientIdInt = new Integer(i);
List<Object[]> rows = patientRows.get(patientIdInt);
if (rows != null) {
for (Object[] row: rows) {
csvPrinter.printRecord(row);
}
}
}
csvPrinter.close();
bufferedWriter.close();
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Exporting Encounters from " + sourceCsvPath + " to " + outputPath);
}*/
/*private static void registerShutdownHook() {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
LOG.info("");
try {
Thread.sleep(5000);
} catch (Throwable ex) {
LOG.error("", ex);
}
LOG.info("Done");
}
});
}*/
/**
 * Scans a tree of EMIS SFTP extracts to work out, per publishing organisation, the first
 * extract date on which its sharing agreement was active, and writes a CSV summary file.
 * <p>
 * Expected layout: path/&lt;server folder&gt;/&lt;extract folder&gt;/&lt;csv files&gt;, where each extract
 * folder name is parseable as yyyy-MM-dd'T'HH.mm.ss. Extracts are processed in chronological
 * order so per-extract deltas (e.g. patient deletions) are applied correctly.
 * <p>
 * Any exception is logged and swallowed; the routine always logs a "Finished" line.
 *
 * @param path       root directory containing one sub-folder per SFTP server/feed
 * @param outputPath file the CSV summary is written to
 */
private static void findEmisStartDates(String path, String outputPath) {
    LOG.info("Finding EMIS Start Dates in " + path + ", writing to " + outputPath);
    try {
        //extract folders are named with this timestamp format
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH.mm.ss");
        //all of the below maps are keyed by organisation GUID
        Map<String, Date> startDates = new HashMap<>(); //first extract date agreement was seen active; nulled if later disabled
        Map<String, String> servers = new HashMap<>(); //SFTP server folder the org was seen on
        Map<String, String> names = new HashMap<>();
        Map<String, String> odsCodes = new HashMap<>();
        Map<String, String> cdbNumbers = new HashMap<>();
        Map<String, Set<String>> distinctPatients = new HashMap<>(); //current (non-deleted) patient GUIDs per org
        File root = new File(path);
        for (File sftpRoot: root.listFiles()) {
            LOG.info("Checking " + sftpRoot);
            //index the extract folders by their parsed date so they can be walked chronologically
            Map<Date, File> extracts = new HashMap<>();
            List<Date> extractDates = new ArrayList<>();
            for (File extractRoot: sftpRoot.listFiles()) {
                Date d = sdf.parse(extractRoot.getName());
                //LOG.info("" + extractRoot.getName() + " -> " + d);
                extracts.put(d, extractRoot);
                extractDates.add(d);
            }
            Collections.sort(extractDates);
            for (Date extractDate: extractDates) {
                File extractRoot = extracts.get(extractDate);
                LOG.info("Checking " + extractRoot);
                //read the sharing agreements file
                //e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv
                File sharingAgreementsFile = null;
                for (File f: extractRoot.listFiles()) {
                    String name = f.getName().toLowerCase();
                    if (name.indexOf("agreements_sharingorganisation") > -1
                            && name.endsWith(".csv")) {
                        sharingAgreementsFile = f;
                        break;
                    }
                }
                if (sharingAgreementsFile == null) {
                    LOG.info("Null agreements file for " + extractRoot);
                    continue;
                }
                CSVParser csvParser = CSVParser.parse(sharingAgreementsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
                try {
                    Iterator<CSVRecord> csvIterator = csvParser.iterator();
                    while (csvIterator.hasNext()) {
                        CSVRecord csvRecord = csvIterator.next();
                        String orgGuid = csvRecord.get("OrganisationGuid");
                        String activated = csvRecord.get("IsActivated");
                        String disabled = csvRecord.get("Disabled");
                        servers.put(orgGuid, sftpRoot.getName());
                        if (activated.equalsIgnoreCase("true")) {
                            if (disabled.equalsIgnoreCase("false")) {
                                //agreement is live - record the first extract date we saw it active
                                Date d = sdf.parse(extractRoot.getName());
                                Date existingDate = startDates.get(orgGuid);
                                if (existingDate == null) {
                                    startDates.put(orgGuid, d);
                                }
                            } else {
                                //agreement has since been disabled - null out any previously recorded start date
                                if (startDates.containsKey(orgGuid)) {
                                    startDates.put(orgGuid, null);
                                }
                            }
                        }
                    }
                } finally {
                    csvParser.close();
                }
                //go through orgs file to get name, ods and cdb codes
                File orgsFile = null;
                for (File f: extractRoot.listFiles()) {
                    String name = f.getName().toLowerCase();
                    if (name.indexOf("admin_organisation_") > -1
                            && name.endsWith(".csv")) {
                        orgsFile = f;
                        break;
                    }
                }
                //NOTE(review): orgsFile (and patientFile below) may still be null here, which would
                //NPE in CSVParser.parse and be swallowed by the outer catch - presumably every
                //extract contains these files, but confirm against real data
                csvParser = CSVParser.parse(orgsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
                try {
                    Iterator<CSVRecord> csvIterator = csvParser.iterator();
                    while (csvIterator.hasNext()) {
                        CSVRecord csvRecord = csvIterator.next();
                        String orgGuid = csvRecord.get("OrganisationGuid");
                        String name = csvRecord.get("OrganisationName");
                        String odsCode = csvRecord.get("ODSCode");
                        String cdb = csvRecord.get("CDB");
                        //later extracts overwrite earlier values, so we end up with the most recent details
                        names.put(orgGuid, name);
                        odsCodes.put(orgGuid, odsCode);
                        cdbNumbers.put(orgGuid, cdb);
                    }
                } finally {
                    csvParser.close();
                }
                //go through patients file to get count
                File patientFile = null;
                for (File f: extractRoot.listFiles()) {
                    String name = f.getName().toLowerCase();
                    if (name.indexOf("admin_patient_") > -1
                            && name.endsWith(".csv")) {
                        patientFile = f;
                        break;
                    }
                }
                csvParser = CSVParser.parse(patientFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
                try {
                    Iterator<CSVRecord> csvIterator = csvParser.iterator();
                    while (csvIterator.hasNext()) {
                        CSVRecord csvRecord = csvIterator.next();
                        String orgGuid = csvRecord.get("OrganisationGuid");
                        String patientGuid = csvRecord.get("PatientGuid");
                        String deleted = csvRecord.get("Deleted");
                        Set<String> distinctPatientSet = distinctPatients.get(orgGuid);
                        if (distinctPatientSet == null) {
                            distinctPatientSet = new HashSet<>();
                            distinctPatients.put(orgGuid, distinctPatientSet);
                        }
                        //extracts are deltas applied in date order, so removing deleted patients
                        //leaves the set reflecting the current patient population
                        if (deleted.equalsIgnoreCase("true")) {
                            distinctPatientSet.remove(patientGuid);
                        } else {
                            distinctPatientSet.add(patientGuid);
                        }
                    }
                } finally {
                    csvParser.close();
                }
            }
        }
        //build one summary CSV row per organisation that ever had an agreement record
        SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd");
        StringBuilder sb = new StringBuilder();
        sb.append("Name,OdsCode,CDB,OrgGuid,StartDate,Server,Patients");
        for (String orgGuid: startDates.keySet()) {
            Date startDate = startDates.get(orgGuid); //null means the agreement was disabled at some point
            String server = servers.get(orgGuid);
            String name = names.get(orgGuid);
            String odsCode = odsCodes.get(orgGuid);
            String cdbNumber = cdbNumbers.get(orgGuid);
            Set<String> distinctPatientSet = distinctPatients.get(orgGuid);
            String startDateDesc = null;
            if (startDate != null) {
                startDateDesc = sdfOutput.format(startDate);
            }
            Long countDistinctPatients = null;
            if (distinctPatientSet != null) {
                countDistinctPatients = new Long(distinctPatientSet.size());
            }
            sb.append("\n");
            sb.append("\"" + name + "\"");
            sb.append(",");
            sb.append("\"" + odsCode + "\"");
            sb.append(",");
            sb.append("\"" + cdbNumber + "\"");
            sb.append(",");
            sb.append("\"" + orgGuid + "\"");
            sb.append(",");
            sb.append(startDateDesc);
            sb.append(",");
            sb.append("\"" + server + "\"");
            sb.append(",");
            sb.append(countDistinctPatients);
        }
        LOG.info(sb.toString());
        FileUtils.writeStringToFile(new File(outputPath), sb.toString());
    } catch (Exception ex) {
        LOG.error("", ex);
    }
    LOG.info("Finished Finding Start Dates in " + path + ", writing to " + outputPath);
}
/**
 * Scans EMIS extract folders under {@code path}, counting distinct consultation source
 * terms (with their Snomed code lookup) seen on or after 2017-01-01, and writes the
 * term/code/count summary as CSV to {@code outputPath}.
 * <p>
 * Fixes applied: CSV parsers are now closed in finally blocks (matching the style used in
 * findEmisStartDates) so a malformed record no longer leaks the parser; the deprecated
 * {@code new Long(...)} counting was replaced with {@code Map.merge}; corrected the
 * "FInished" typo in the log message.
 * <p>
 * Any exception is logged and swallowed; the routine always logs a final "Finished" line.
 *
 * @param path       root directory containing one sub-folder per reader (e.g. emis001)
 * @param outputPath file the CSV summary is written to
 */
private static void findEncounterTerms(String path, String outputPath) {
    LOG.info("Finding Encounter Terms from " + path);
    //key is the partially-built output line (source term, snomed ID, snomed term), value is occurrence count
    Map<String, Long> hmResults = new HashMap<>();
    //source term, source term snomed ID, source term snomed term - count
    try {
        File root = new File(path);
        File[] files = root.listFiles();
        for (File readerRoot: files) { //emis001
            LOG.info("Finding terms in " + readerRoot);
            //first read in all the coding files to build up our map of codes
            Map<String, String> hmCodes = new HashMap<>(); //CodeId -> "snomedId,\"term\"" CSV fragment
            for (File dateFolder: readerRoot.listFiles()) {
                LOG.info("Looking for codes in " + dateFolder);
                File f = findFile(dateFolder, "Coding_ClinicalCode");
                if (f == null) {
                    LOG.error("Failed to find coding file in " + dateFolder.getAbsolutePath());
                    continue;
                }
                CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
                try {
                    Iterator<CSVRecord> csvIterator = csvParser.iterator();
                    while (csvIterator.hasNext()) {
                        CSVRecord csvRecord = csvIterator.next();
                        String codeId = csvRecord.get("CodeId");
                        String term = csvRecord.get("Term");
                        String snomed = csvRecord.get("SnomedCTConceptId");
                        //pre-build the CSV fragment appended to each output line
                        hmCodes.put(codeId, snomed + ",\"" + term + "\"");
                    }
                } finally {
                    //close even if a record throws mid-iteration
                    csvParser.close();
                }
            }
            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
            Date cutoff = dateFormat.parse("2017-01-01");
            //now process the consultation files themselves
            for (File dateFolder: readerRoot.listFiles()) {
                LOG.info("Looking for consultations in " + dateFolder);
                File f = findFile(dateFolder, "CareRecord_Consultation");
                if (f == null) {
                    LOG.error("Failed to find consultation file in " + dateFolder.getAbsolutePath());
                    continue;
                }
                CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
                try {
                    Iterator<CSVRecord> csvIterator = csvParser.iterator();
                    while (csvIterator.hasNext()) {
                        CSVRecord csvRecord = csvIterator.next();
                        String term = csvRecord.get("ConsultationSourceTerm");
                        String codeId = csvRecord.get("ConsultationSourceCodeId");
                        if (Strings.isNullOrEmpty(term)
                                && Strings.isNullOrEmpty(codeId)) {
                            continue;
                        }
                        String date = csvRecord.get("EffectiveDate");
                        if (Strings.isNullOrEmpty(date)) {
                            continue;
                        }
                        //only count consultations on or after the cutoff date
                        Date d = dateFormat.parse(date);
                        if (d.before(cutoff)) {
                            continue;
                        }
                        String line = "\"" + term + "\",";
                        if (!Strings.isNullOrEmpty(codeId)) {
                            String codeLookup = hmCodes.get(codeId);
                            if (codeLookup == null) {
                                LOG.error("Failed to find lookup for codeID " + codeId);
                                continue;
                            }
                            line += codeLookup;
                        } else {
                            line += ",";
                        }
                        //count occurrences of each distinct term/code combination
                        hmResults.merge(line, 1L, Long::sum);
                    }
                } finally {
                    csvParser.close();
                }
            }
        }
        //save results to file
        StringBuilder output = new StringBuilder();
        output.append("\"consultation term\",\"snomed concept ID\",\"snomed term\",\"count\"");
        output.append("\r\n");
        for (String line: hmResults.keySet()) {
            Long count = hmResults.get(line);
            String combined = line + "," + count;
            output.append(combined);
            output.append("\r\n");
        }
        LOG.info("Finished");
        LOG.info(output.toString());
        FileUtils.writeStringToFile(new File(outputPath), output.toString());
        LOG.info("written output to " + outputPath);
    } catch (Exception ex) {
        LOG.error("", ex);
    }
    LOG.info("Finished finding Encounter Terms from " + path);
}
/**
 * Returns the first file directly under the given directory whose name contains the
 * given token, or null if no such file exists.
 * <p>
 * Fix: {@code File.listFiles()} returns null when the path isn't a readable directory,
 * which previously caused a NullPointerException; that case now returns null like any
 * other "not found" result. Also uses {@code String.contains} over {@code indexOf > -1}.
 *
 * @param root  directory to search (non-recursive)
 * @param token substring to look for in the file name (case-sensitive)
 * @return the first matching file, or null if none found or the directory can't be listed
 */
private static File findFile(File root, String token) throws Exception {
    File[] children = root.listFiles();
    if (children == null) {
        //root isn't a directory, or an I/O error occurred listing it
        return null;
    }
    for (File f: children) {
        if (f.getName().contains(token)) {
            return f;
        }
    }
    return null;
}
/*private static void populateProtocolQueue(String serviceIdStr, String startingExchangeId) {
LOG.info("Starting Populating Protocol Queue for " + serviceIdStr);
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
if (serviceIdStr.equalsIgnoreCase("All")) {
serviceIdStr = null;
}
try {
List<Service> services = new ArrayList<>();
if (Strings.isNullOrEmpty(serviceIdStr)) {
services = serviceRepository.getAll();
} else {
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
services.add(service);
}
for (Service service: services) {
List<UUID> exchangeIds = auditRepository.getExchangeIdsForService(service.getId());
LOG.info("Found " + exchangeIds.size() + " exchangeIds for " + service.getName());
if (startingExchangeId != null) {
UUID startingExchangeUuid = UUID.fromString(startingExchangeId);
if (exchangeIds.contains(startingExchangeUuid)) {
//if in the list, remove everything up to and including the starting exchange
int index = exchangeIds.indexOf(startingExchangeUuid);
LOG.info("Found starting exchange " + startingExchangeId + " at " + index + " so removing up to this point");
for (int i=index; i>=0; i--) {
exchangeIds.remove(i);
}
startingExchangeId = null;
} else {
//if not in the list, skip all these exchanges
LOG.info("List doesn't contain starting exchange " + startingExchangeId + " so skipping");
continue;
}
}
QueueHelper.postToExchange(exchangeIds, "edsProtocol", null, true);
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Populating Protocol Queue for " + serviceIdStr);
}*/
/*private static void findDeletedOrgs() {
LOG.info("Starting finding deleted orgs");
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
List<Service> services = new ArrayList<>();
try {
for (Service service: serviceRepository.getAll()) {
services.add(service);
}
} catch (Exception ex) {
LOG.error("", ex);
}
services.sort((o1, o2) -> {
String name1 = o1.getName();
String name2 = o2.getName();
return name1.compareToIgnoreCase(name2);
});
for (Service service: services) {
try {
UUID serviceUuid = service.getId();
List<Exchange> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 1, new Date(0), new Date());
LOG.info("Service: " + service.getName() + " " + service.getLocalId());
if (exchangeByServices.isEmpty()) {
LOG.info(" no exchange found!");
continue;
}
Exchange exchangeByService = exchangeByServices.get(0);
UUID exchangeId = exchangeByService.getId();
Exchange exchange = auditRepository.getExchange(exchangeId);
Map<String, String> headers = exchange.getHeaders();
String systemUuidStr = headers.get(HeaderKeys.SenderSystemUuid);
UUID systemUuid = UUID.fromString(systemUuidStr);
int batches = countBatches(exchangeId, serviceUuid, systemUuid);
LOG.info(" Most recent exchange had " + batches + " batches");
if (batches > 1 && batches < 2000) {
continue;
}
//go back until we find the FIRST exchange where it broke
exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 250, new Date(0), new Date());
for (int i=0; i<exchangeByServices.size(); i++) {
exchangeByService = exchangeByServices.get(i);
exchangeId = exchangeByService.getId();
batches = countBatches(exchangeId, serviceUuid, systemUuid);
exchange = auditRepository.getExchange(exchangeId);
Date timestamp = exchange.getTimestamp();
if (batches < 1 || batches > 2000) {
LOG.info(" " + timestamp + " had " + batches);
}
if (batches > 1 && batches < 2000) {
LOG.info(" " + timestamp + " had " + batches);
break;
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
}
LOG.info("Finished finding deleted orgs");
}*/
/**
 * Sums the number of batches created across every transform audit recorded for the
 * given service/system/exchange combination.
 * <p>
 * Audits with a null batch count contribute nothing to the total.
 *
 * @param exchangeId exchange whose transform audits are counted
 * @param serviceId  owning service
 * @param systemId   owning system
 * @return total batches created over all matching transform audits
 * @throws Exception if the audit lookup fails
 */
private static int countBatches(UUID exchangeId, UUID serviceId, UUID systemId) throws Exception {
    ExchangeDalI dal = DalProvider.factoryExchangeDal();
    int total = 0;
    for (ExchangeTransformAudit transformAudit : dal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId)) {
        Integer created = transformAudit.getNumberBatchesCreated();
        if (created != null) {
            total += created;
        }
    }
    return total;
}
/*private static void fixExchanges(UUID justThisService) {
LOG.info("Fixing exchanges");
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId : exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
boolean changed = false;
String body = exchange.getBody();
String[] files = body.split("\n");
if (files.length == 0) {
continue;
}
for (int i=0; i<files.length; i++) {
String original = files[i];
//remove /r characters
String trimmed = original.trim();
//add the new prefix
if (!trimmed.startsWith("sftpreader/EMIS001/")) {
trimmed = "sftpreader/EMIS001/" + trimmed;
}
if (!original.equals(trimmed)) {
files[i] = trimmed;
changed = true;
}
}
if (changed) {
LOG.info("Fixed exchange " + exchangeId);
LOG.info(body);
body = String.join("\n", files);
exchange.setBody(body);
AuditWriter.writeExchange(exchange);
}
}
}
LOG.info("Fixed exchanges");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void deleteDataForService(UUID serviceId) {
Service dbService = new ServiceRepository().getById(serviceId);
//the delete will take some time, so do the delete in a separate thread
LOG.info("Deleting all data for service " + dbService.getName() + " " + dbService.getId());
FhirDeletionService deletor = new FhirDeletionService(dbService);
try {
deletor.deleteData();
LOG.info("Completed deleting all data for service " + dbService.getName() + " " + dbService.getId());
} catch (Exception ex) {
LOG.error("Error deleting service " + dbService.getName() + " " + dbService.getId(), ex);
}
}*/
/*private static void testLogging() {
while (true) {
System.out.println("Checking logging at " + System.currentTimeMillis());
try {
Thread.sleep(4000);
} catch (Exception e) {
e.printStackTrace();
}
LOG.trace("trace logging");
LOG.debug("debug logging");
LOG.info("info logging");
LOG.warn("warn logging");
LOG.error("error logging");
}
}
*/
/*private static void fixExchangeProtocols() {
LOG.info("Fixing exchange protocols");
AuditRepository auditRepository = new AuditRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.Exchange LIMIT 1000;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
LOG.info("Processing exchange " + exchangeId);
Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceId = UUID.fromString(serviceIdStr);
List<String> newIds = new ArrayList<>();
String protocolJson = headers.get(HeaderKeys.Protocols);
if (!headers.containsKey(HeaderKeys.Protocols)) {
try {
List<LibraryItem> libraryItemList = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr);
// Get protocols where service is publisher
newIds = libraryItemList.stream()
.filter(
libraryItem -> libraryItem.getProtocol().getServiceContract().stream()
.anyMatch(sc ->
sc.getType().equals(ServiceContractType.PUBLISHER)
&& sc.getService().getUuid().equals(serviceIdStr)))
.map(t -> t.getUuid().toString())
.collect(Collectors.toList());
} catch (Exception e) {
LOG.error("Failed to find protocols for exchange " + exchange.getExchangeId(), e);
continue;
}
} else {
try {
JsonNode node = ObjectMapperPool.getInstance().readTree(protocolJson);
for (int i = 0; i < node.size(); i++) {
JsonNode libraryItemNode = node.get(i);
JsonNode idNode = libraryItemNode.get("uuid");
String id = idNode.asText();
newIds.add(id);
}
} catch (Exception e) {
LOG.error("Failed to read Json from " + protocolJson + " for exchange " + exchange.getExchangeId(), e);
continue;
}
}
try {
if (newIds.isEmpty()) {
headers.remove(HeaderKeys.Protocols);
} else {
String protocolsJson = ObjectMapperPool.getInstance().writeValueAsString(newIds.toArray());
headers.put(HeaderKeys.Protocols, protocolsJson);
}
} catch (JsonProcessingException e) {
LOG.error("Unable to serialize protocols to JSON for exchange " + exchange.getExchangeId(), e);
continue;
}
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(headerJson);
} catch (JsonProcessingException e) {
LOG.error("Failed to write exchange headers to Json for exchange " + exchange.getExchangeId(), e);
continue;
}
auditRepository.save(exchange);
}
LOG.info("Finished fixing exchange protocols");
}*/
/*private static void fixExchangeHeaders() {
LOG.info("Fixing exchange headers");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
OrganisationRepository organisationRepository = new OrganisationRepository();
List<Exchange> exchanges = new AuditRepository().getAllExchanges();
for (Exchange exchange: exchanges) {
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
if (headers.containsKey(HeaderKeys.SenderLocalIdentifier)
&& headers.containsKey(HeaderKeys.SenderOrganisationUuid)) {
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
Map<UUID, String> orgMap = service.getOrganisations();
if (orgMap.size() != 1) {
LOG.error("Wrong number of orgs in service " + serviceId + " for exchange " + exchange.getExchangeId());
continue;
}
UUID orgId = orgMap
.keySet()
.stream()
.collect(StreamExtension.firstOrNullCollector());
Organisation organisation = organisationRepository.getById(orgId);
String odsCode = organisation.getNationalId();
headers.put(HeaderKeys.SenderLocalIdentifier, odsCode);
headers.put(HeaderKeys.SenderOrganisationUuid, orgId.toString());
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
LOG.info("Creating exchange " + exchange.getExchangeId());
}
LOG.info("Finished fixing exchange headers");
}*/
/*private static void fixExchangeHeaders() {
LOG.info("Fixing exchange headers");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
OrganisationRepository organisationRepository = new OrganisationRepository();
LibraryRepository libraryRepository = new LibraryRepository();
List<Exchange> exchanges = new AuditRepository().getAllExchanges();
for (Exchange exchange: exchanges) {
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
boolean changed = false;
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
try {
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint : endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString();
ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId);
Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId());
LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
System system = libraryItem.getSystem();
for (TechnicalInterface technicalInterface : system.getTechnicalInterface()) {
if (endpointInterfaceId.equals(technicalInterface.getUuid())) {
if (!headers.containsKey(HeaderKeys.SourceSystem)) {
headers.put(HeaderKeys.SourceSystem, technicalInterface.getMessageFormat());
changed = true;
}
if (!headers.containsKey(HeaderKeys.SystemVersion)) {
headers.put(HeaderKeys.SystemVersion, technicalInterface.getMessageFormatVersion());
changed = true;
}
if (!headers.containsKey(HeaderKeys.SenderSystemUuid)) {
headers.put(HeaderKeys.SenderSystemUuid, endpointSystemId.toString());
changed = true;
}
}
}
}
} catch (Exception e) {
LOG.error("Failed to find endpoint details for " + exchange.getExchangeId());
continue;
}
if (changed) {
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
LOG.info("Fixed exchange " + exchange.getExchangeId());
}
}
LOG.info("Finished fixing exchange headers");
}*/
/*private static void testConnection(String configName) {
try {
JsonNode config = ConfigManager.getConfigurationAsJson(configName, "enterprise");
String driverClass = config.get("driverClass").asText();
String url = config.get("url").asText();
String username = config.get("username").asText();
String password = config.get("password").asText();
//force the driver to be loaded
Class.forName(driverClass);
Connection conn = DriverManager.getConnection(url, username, password);
conn.setAutoCommit(false);
LOG.info("Connection ok");
conn.close();
} catch (Exception e) {
LOG.error("", e);
}
}*/
/*private static void testConnection() {
try {
JsonNode config = ConfigManager.getConfigurationAsJson("postgres", "enterprise");
String url = config.get("url").asText();
String username = config.get("username").asText();
String password = config.get("password").asText();
//force the driver to be loaded
Class.forName("org.postgresql.Driver");
Connection conn = DriverManager.getConnection(url, username, password);
conn.setAutoCommit(false);
LOG.info("Connection ok");
conn.close();
} catch (Exception e) {
LOG.error("", e);
}
}*/
//if (exchangeId.equals(UUID.fromString("b9b93be0-afd8-11e6-8c16-c1d5a00342f3"))) {
//}
/*private static void startEnterpriseStream(UUID serviceId, String configName, UUID exchangeIdStartFrom, UUID batchIdStartFrom) throws Exception {
LOG.info("Starting Enterprise Streaming for " + serviceId + " using " + configName + " starting from exchange " + exchangeIdStartFrom + " and batch " + batchIdStartFrom);
LOG.info("Testing database connection");
testConnection(configName);
Service service = new ServiceRepository().getById(serviceId);
List<UUID> orgIds = new ArrayList<>(service.getOrganisations().keySet());
UUID orgId = orgIds.get(0);
List<ExchangeByService> exchangeByServiceList = new AuditRepository().getExchangesByService(serviceId, Integer.MAX_VALUE);
for (int i=exchangeByServiceList.size()-1; i>=0; i--) {
ExchangeByService exchangeByService = exchangeByServiceList.get(i);
//for (ExchangeByService exchangeByService: exchangeByServiceList) {
UUID exchangeId = exchangeByService.getExchangeId();
if (exchangeIdStartFrom != null) {
if (!exchangeIdStartFrom.equals(exchangeId)) {
continue;
} else {
//once we have a match, set to null so we don't skip any subsequent ones
exchangeIdStartFrom = null;
}
}
Exchange exchange = AuditWriter.readExchange(exchangeId);
String senderOrgUuidStr = exchange.getHeader(HeaderKeys.SenderOrganisationUuid);
UUID senderOrgUuid = UUID.fromString(senderOrgUuidStr);
//this one had 90,000 batches and doesn't need doing again
LOG.info("Skipping exchange " + exchangeId);
continue;
List<ExchangeBatch> exchangeBatches = new ExchangeBatchRepository().retrieveForExchangeId(exchangeId);
LOG.info("Processing exchange " + exchangeId + " with " + exchangeBatches.size() + " batches");
for (int j=0; j<exchangeBatches.size(); j++) {
ExchangeBatch exchangeBatch = exchangeBatches.get(j);
UUID batchId = exchangeBatch.getBatchId();
if (batchIdStartFrom != null) {
if (!batchIdStartFrom.equals(batchId)) {
continue;
} else {
batchIdStartFrom = null;
}
}
LOG.info("Processing exchange " + exchangeId + " and batch " + batchId + " " + (j+1) + "/" + exchangeBatches.size());
try {
String outbound = FhirToEnterpriseCsvTransformer.transformFromFhir(senderOrgUuid, batchId, null);
if (!Strings.isNullOrEmpty(outbound)) {
EnterpriseFiler.file(outbound, configName);
}
} catch (Exception ex) {
throw new PipelineException("Failed to process exchange " + exchangeId + " and batch " + batchId, ex);
}
}
}
}*/
/*private static void fixMissingExchanges() {
LOG.info("Fixing missing exchanges");
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id, batch_id, inserted_at FROM ehr.exchange_batch LIMIT 600000;");
stmt.setFetchSize(100);
Set<UUID> exchangeIdsDone = new HashSet<>();
AuditRepository auditRepository = new AuditRepository();
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
UUID batchId = row.get(1, UUID.class);
Date date = row.getTimestamp(2);
//LOG.info("Exchange " + exchangeId + " batch " + batchId + " date " + date);
if (exchangeIdsDone.contains(exchangeId)) {
continue;
}
if (auditRepository.getExchange(exchangeId) != null) {
continue;
}
UUID serviceId = findServiceId(batchId, session);
if (serviceId == null) {
continue;
}
Exchange exchange = new Exchange();
ExchangeByService exchangeByService = new ExchangeByService();
ExchangeEvent exchangeEvent = new ExchangeEvent();
Map<String, String> headers = new HashMap<>();
headers.put(HeaderKeys.SenderServiceUuid, serviceId.toString());
String headersJson = null;
try {
headersJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setBody("Body not available, as exchange re-created");
exchange.setExchangeId(exchangeId);
exchange.setHeaders(headersJson);
exchange.setTimestamp(date);
exchangeByService.setExchangeId(exchangeId);
exchangeByService.setServiceId(serviceId);
exchangeByService.setTimestamp(date);
exchangeEvent.setEventDesc("Created_By_Conversion");
exchangeEvent.setExchangeId(exchangeId);
exchangeEvent.setTimestamp(new Date());
auditRepository.save(exchange);
auditRepository.save(exchangeEvent);
auditRepository.save(exchangeByService);
exchangeIdsDone.add(exchangeId);
LOG.info("Creating exchange " + exchangeId);
}
LOG.info("Finished exchange fix");
}
private static UUID findServiceId(UUID batchId, Session session) {
Statement stmt = new SimpleStatement("select resource_type, resource_id from ehr.resource_by_exchange_batch where batch_id = " + batchId + " LIMIT 1;");
ResultSet rs = session.execute(stmt);
if (rs.isExhausted()) {
LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId);
return null;
}
Row row = rs.one();
String resourceType = row.getString(0);
UUID resourceId = row.get(1, UUID.class);
stmt = new SimpleStatement("select service_id from ehr.resource_history where resource_type = '" + resourceType + "' and resource_id = " + resourceId + " LIMIT 1;");
rs = session.execute(stmt);
if (rs.isExhausted()) {
LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId);
return null;
}
row = rs.one();
UUID serviceId = row.get(0, UUID.class);
return serviceId;
}*/
/*private static void fixExchangeEvents() {
List<ExchangeEvent> events = new AuditRepository().getAllExchangeEvents();
for (ExchangeEvent event: events) {
if (event.getEventDesc() != null) {
continue;
}
String eventDesc = "";
int eventType = event.getEvent().intValue();
switch (eventType) {
case 1:
eventDesc = "Receive";
break;
case 2:
eventDesc = "Validate";
break;
case 3:
eventDesc = "Transform_Start";
break;
case 4:
eventDesc = "Transform_End";
break;
case 5:
eventDesc = "Send";
break;
default:
eventDesc = "??? " + eventType;
}
event.setEventDesc(eventDesc);
new AuditRepository().save(null, event);
}
}*/
//String serviceId = headers.get(HeaderKeys.SenderServiceUuid);
//}
/*private static void fixExchanges() {
AuditRepository auditRepository = new AuditRepository();
Map<UUID, Set<UUID>> existingOnes = new HashMap();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
List<Exchange> exchanges = auditRepository.getAllExchanges();
for (Exchange exchange: exchanges) {
UUID exchangeUuid = exchange.getExchangeId();
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeUuid + " and Json " + headerJson);
continue;
}
if (serviceId == null) {
LOG.warn("No service ID found for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceUuid = UUID.fromString(serviceId);
Set<UUID> exchangeIdsDone = existingOnes.get(serviceUuid);
if (exchangeIdsDone == null) {
exchangeIdsDone = new HashSet<>();
List<ExchangeByService> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, Integer.MAX_VALUE);
for (ExchangeByService exchangeByService: exchangeByServices) {
exchangeIdsDone.add(exchangeByService.getExchangeId());
}
existingOnes.put(serviceUuid, exchangeIdsDone);
}
//create the exchange by service entity
if (!exchangeIdsDone.contains(exchangeUuid)) {
Date timestamp = exchange.getTimestamp();
ExchangeByService newOne = new ExchangeByService();
newOne.setExchangeId(exchangeUuid);
newOne.setServiceId(serviceUuid);
newOne.setTimestamp(timestamp);
auditRepository.save(newOne);
try {
headers.remove(HeaderKeys.BatchIdsJson);
String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(newHeaderJson);
auditRepository.save(exchange);
} catch (JsonProcessingException e) {
LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e);
}
if (!headers.containsKey(HeaderKeys.BatchIdsJson)) {
//fix the batch IDs not being in the exchange
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeUuid);
if (!batches.isEmpty()) {
List<UUID> batchUuids = batches
.stream()
.map(t -> t.getBatchId())
.collect(Collectors.toList());
try {
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchUuids.toArray());
headers.put(HeaderKeys.BatchIdsJson, batchUuidsStr);
String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(newHeaderJson);
auditRepository.save(exchange, null);
} catch (JsonProcessingException e) {
LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e);
}
}
//}
}
}*/
/*private static UUID findSystemId(Service service, String software, String messageVersion) throws PipelineException {
List<JsonServiceInterfaceEndpoint> endpoints = null;
try {
endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString();
LibraryRepository libraryRepository = new LibraryRepository();
ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId);
Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId());
LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
System system = libraryItem.getSystem();
for (TechnicalInterface technicalInterface: system.getTechnicalInterface()) {
if (endpointInterfaceId.equals(technicalInterface.getUuid())
&& technicalInterface.getMessageFormat().equalsIgnoreCase(software)
&& technicalInterface.getMessageFormatVersion().equalsIgnoreCase(messageVersion)) {
return endpointSystemId;
}
}
}
} catch (Exception e) {
throw new PipelineException("Failed to process endpoints from service " + service.getId());
}
return null;
}
*/
/*private static void addSystemIdToExchangeHeaders() throws Exception {
LOG.info("populateExchangeBatchPatients");
AuditRepository auditRepository = new AuditRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
ServiceRepository serviceRepository = new ServiceRepository();
//OrganisationRepository organisationRepository = new OrganisationRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson);
continue;
}
if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))) {
LOG.info("Skipping exchange " + exchangeId + " as no service UUID");
continue;
}
if (!Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) {
LOG.info("Skipping exchange " + exchangeId + " as already got system UUID");
continue;
}
try {
//work out service ID
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
UUID serviceId = UUID.fromString(serviceIdStr);
String software = headers.get(HeaderKeys.SourceSystem);
String version = headers.get(HeaderKeys.SystemVersion);
Service service = serviceRepository.getById(serviceId);
UUID systemUuid = findSystemId(service, software, version);
headers.put(HeaderKeys.SenderSystemUuid, systemUuid.toString());
//work out protocol IDs
try {
String newProtocolIdsJson = DetermineRelevantProtocolIds.getProtocolIdsForPublisherService(serviceIdStr);
headers.put(HeaderKeys.ProtocolIds, newProtocolIdsJson);
} catch (Exception ex) {
LOG.error("Failed to recalculate protocols for " + exchangeId + ": " + ex.getMessage());
}
//save to DB
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
} catch (Exception ex) {
LOG.error("Error with exchange " + exchangeId, ex);
}
}
LOG.info("Finished populateExchangeBatchPatients");
}*/
/*private static void populateExchangeBatchPatients() throws Exception {
LOG.info("populateExchangeBatchPatients");
AuditRepository auditRepository = new AuditRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
//ServiceRepository serviceRepository = new ServiceRepository();
//OrganisationRepository organisationRepository = new OrganisationRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson);
continue;
}
if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))
|| Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) {
LOG.info("Skipping exchange " + exchangeId + " because no service or system in header");
continue;
}
try {
UUID serviceId = UUID.fromString(headers.get(HeaderKeys.SenderServiceUuid));
UUID systemId = UUID.fromString(headers.get(HeaderKeys.SenderSystemUuid));
List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch : exchangeBatches) {
if (exchangeBatch.getEdsPatientId() != null) {
continue;
}
UUID batchId = exchangeBatch.getBatchId();
List<ResourceByExchangeBatch> resourceWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Patient.toString());
if (resourceWrappers.isEmpty()) {
continue;
}
List<UUID> patientIds = new ArrayList<>();
for (ResourceByExchangeBatch resourceWrapper : resourceWrappers) {
UUID patientId = resourceWrapper.getResourceId();
if (resourceWrapper.getIsDeleted()) {
deleteEntirePatientRecord(patientId, serviceId, systemId, exchangeId, batchId);
}
if (!patientIds.contains(patientId)) {
patientIds.add(patientId);
}
}
if (patientIds.size() != 1) {
LOG.info("Skipping exchange " + exchangeId + " and batch " + batchId + " because found " + patientIds.size() + " patient IDs");
continue;
}
UUID patientId = patientIds.get(0);
exchangeBatch.setEdsPatientId(patientId);
exchangeBatchRepository.save(exchangeBatch);
}
} catch (Exception ex) {
LOG.error("Error with exchange " + exchangeId, ex);
}
}
LOG.info("Finished populateExchangeBatchPatients");
}
private static void deleteEntirePatientRecord(UUID patientId, UUID serviceId, UUID systemId, UUID exchangeId, UUID batchId) throws Exception {
FhirStorageService storageService = new FhirStorageService(serviceId, systemId);
ResourceRepository resourceRepository = new ResourceRepository();
List<ResourceByPatient> resourceWrappers = resourceRepository.getResourcesByPatient(serviceId, systemId, patientId);
for (ResourceByPatient resourceWrapper: resourceWrappers) {
String json = resourceWrapper.getResourceData();
Resource resource = new JsonParser().parse(json);
storageService.exchangeBatchDelete(exchangeId, batchId, resource);
}
}*/
/*private static void convertPatientSearch() {
LOG.info("Converting Patient Search");
ResourceRepository resourceRepository = new ResourceRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
LOG.info("Doing service " + service.getName());
for (UUID systemId : findSystemIds(service)) {
List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.EpisodeOfCare.toString());
for (ResourceByService resourceWrapper: resourceWrappers) {
if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
continue;
}
try {
EpisodeOfCare episodeOfCare = (EpisodeOfCare) new JsonParser().parse(resourceWrapper.getResourceData());
String patientId = ReferenceHelper.getReferenceId(episodeOfCare.getPatient());
ResourceHistory patientWrapper = resourceRepository.getCurrentVersion(ResourceType.Patient.toString(), UUID.fromString(patientId));
if (Strings.isNullOrEmpty(patientWrapper.getResourceData())) {
continue;
}
Patient patient = (Patient) new JsonParser().parse(patientWrapper.getResourceData());
PatientSearchHelper.update(serviceId, systemId, patient);
PatientSearchHelper.update(serviceId, systemId, episodeOfCare);
} catch (Exception ex) {
LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
}
}
}
}
LOG.info("Converted Patient Search");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/**
 * Returns the system UUIDs of every endpoint configured on the given service.
 * <p>
 * The service stores its endpoint list as a JSON string; this is deserialised
 * into {@code JsonServiceInterfaceEndpoint} objects and the system UUID of
 * each endpoint is collected in order.
 *
 * @param service the service whose endpoint JSON should be inspected
 * @return the system UUIDs of all endpoints (may be empty, never null)
 * @throws Exception if the endpoint JSON cannot be parsed; the underlying
 *                   failure is preserved as the cause
 */
private static List<UUID> findSystemIds(Service service) throws Exception {
    List<UUID> ret = new ArrayList<>();
    try {
        List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
        for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
            ret.add(endpoint.getSystemUuid());
        }
    } catch (Exception e) {
        //chain the original exception as the cause so the real parse failure
        //(and its stack trace) is not lost when this is logged further up
        throw new Exception("Failed to process endpoints from service " + service.getId(), e);
    }
    return ret;
}
/*private static void convertPatientLink() {
LOG.info("Converting Patient Link");
ResourceRepository resourceRepository = new ResourceRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
LOG.info("Doing service " + service.getName());
for (UUID systemId : findSystemIds(service)) {
List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.Patient.toString());
for (ResourceByService resourceWrapper: resourceWrappers) {
if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
continue;
}
try {
Patient patient = (Patient)new JsonParser().parse(resourceWrapper.getResourceData());
PatientLinkHelper.updatePersonId(patient);
} catch (Exception ex) {
LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
}
}
}
}
LOG.info("Converted Patient Link");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixConfidentialPatients(String sharedStoragePath, UUID justThisService) {
LOG.info("Fixing Confidential Patients using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager();
Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class);
Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class);
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
Map<String, ResourceHistory> resourcesFixed = new HashMap<>();
Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Set<UUID> batchIdsToPutInProtocolQueue = new HashSet<>();
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f);
EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId);
ResourceFiler filer = new ResourceFiler(exchangeId, serviceId, systemId, null, null, 1);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers);
ProblemPreTransformer.transform(version, parsers, filer, helper);
ObservationPreTransformer.transform(version, parsers, filer, helper);
DrugRecordPreTransformer.transform(version, parsers, filer, helper);
IssueRecordPreTransformer.transform(version, parsers, filer, helper);
DiaryPreTransformer.transform(version, parsers, filer, helper);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient)parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getIsConfidential()
&& !patientParser.getDeleted()) {
PatientTransformer.createResource(patientParser, filer, helper, version);
}
}
patientParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class);
while (consultationParser.nextRecord()) {
if (consultationParser.getIsConfidential()
&& !consultationParser.getDeleted()) {
ConsultationTransformer.createResource(consultationParser, filer, helper, version);
}
}
consultationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
if (observationParser.getIsConfidential()
&& !observationParser.getDeleted()) {
ObservationTransformer.createResource(observationParser, filer, helper, version);
}
}
observationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class);
while (diaryParser.nextRecord()) {
if (diaryParser.getIsConfidential()
&& !diaryParser.getDeleted()) {
DiaryTransformer.createResource(diaryParser, filer, helper, version);
}
}
diaryParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class);
while (drugRecordParser.nextRecord()) {
if (drugRecordParser.getIsConfidential()
&& !drugRecordParser.getDeleted()) {
DrugRecordTransformer.createResource(drugRecordParser, filer, helper, version);
}
}
drugRecordParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class);
while (issueRecordParser.nextRecord()) {
if (issueRecordParser.getIsConfidential()
&& !issueRecordParser.getDeleted()) {
IssueRecordTransformer.createResource(issueRecordParser, filer, helper, version);
}
}
issueRecordParser.close();
filer.waitToFinish(); //just to close the thread pool, even though it's not been used
List<Resource> resources = filer.getNewResources();
for (Resource resource: resources) {
String patientId = IdHelper.getPatientId(resource);
UUID edsPatientId = UUID.fromString(patientId);
ResourceType resourceType = resource.getResourceType();
UUID resourceId = UUID.fromString(resource.getId());
boolean foundResourceInDbBatch = false;
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds != null) {
for (UUID batchId : batchIds) {
List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), resourceId);
if (resourceByExchangeBatches.isEmpty()) {
//if we've deleted data, this will be null
continue;
}
foundResourceInDbBatch = true;
for (ResourceByExchangeBatch resourceByExchangeBatch : resourceByExchangeBatches) {
String json = resourceByExchangeBatch.getResourceData();
if (!Strings.isNullOrEmpty(json)) {
LOG.warn("JSON already in resource " + resourceType + " " + resourceId);
} else {
json = parserPool.composeString(resource);
resourceByExchangeBatch.setResourceData(json);
resourceByExchangeBatch.setIsDeleted(false);
resourceByExchangeBatch.setSchemaVersion("0.1");
LOG.info("Saved resource by batch " + resourceType + " " + resourceId + " in batch " + batchId);
UUID versionUuid = resourceByExchangeBatch.getVersion();
ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(resourceId, resourceType.toString(), versionUuid);
if (resourceHistory == null) {
throw new Exception("Failed to find resource history for " + resourceType + " " + resourceId + " and version " + versionUuid);
}
resourceHistory.setIsDeleted(false);
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
resourceHistory.setSchemaVersion("0.1");
resourceRepository.save(resourceByExchangeBatch);
resourceRepository.save(resourceHistory);
batchIdsToPutInProtocolQueue.add(batchId);
String key = resourceType.toString() + ":" + resourceId;
resourcesFixed.put(key, resourceHistory);
}
//if a patient became confidential, we will have deleted all resources for that
//patient, so we need to undo that too
//to undelete WHOLE patient record
//1. if THIS resource is a patient
//2. get all other deletes from the same exchange batch
//3. delete those from resource_by_exchange_batch (the deleted ones only)
//4. delete same ones from resource_history
//5. retrieve most recent resource_history
//6. if not deleted, add to resources fixed
if (resourceType == ResourceType.Patient) {
List<ResourceByExchangeBatch> resourcesInSameBatch = resourceRepository.getResourcesForBatch(batchId);
LOG.info("Undeleting " + resourcesInSameBatch.size() + " resources for batch " + batchId);
for (ResourceByExchangeBatch resourceInSameBatch: resourcesInSameBatch) {
if (!resourceInSameBatch.getIsDeleted()) {
continue;
}
//patient and episode resources will be restored by the above stuff, so don't try
//to do it again
if (resourceInSameBatch.getResourceType().equals(ResourceType.Patient.toString())
|| resourceInSameBatch.getResourceType().equals(ResourceType.EpisodeOfCare.toString())) {
continue;
}
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(resourceInSameBatch.getResourceId(), resourceInSameBatch.getResourceType(), resourceInSameBatch.getVersion());
mapperResourceByExchangeBatch.delete(resourceInSameBatch);
mapperResourceHistory.delete(deletedResourceHistory);
batchIdsToPutInProtocolQueue.add(batchId);
//check the most recent version of our resource, and if it's not deleted, add to the list to update the resource_by_service table
ResourceHistory mostRecentDeletedResourceHistory = resourceRepository.getCurrentVersion(resourceInSameBatch.getResourceType(), resourceInSameBatch.getResourceId());
if (mostRecentDeletedResourceHistory != null
&& !mostRecentDeletedResourceHistory.getIsDeleted()) {
String key2 = mostRecentDeletedResourceHistory.getResourceType().toString() + ":" + mostRecentDeletedResourceHistory.getResourceId();
resourcesFixed.put(key2, mostRecentDeletedResourceHistory);
}
}
}
}
}
}
//if we didn't find records in the DB to update, then
if (!foundResourceInDbBatch) {
//we can't generate a back-dated time UUID, but we need one so the resource_history
//table is in order. To get a suitable time UUID, we just pull out the first exchange batch for our exchange,
//and the batch ID is actually a time UUID that was allocated around the right time
ExchangeBatch firstBatch = exchangeBatchRepository.retrieveFirstForExchangeId(exchangeId);
//if there was no batch for the exchange, then the exchange wasn't processed at all. So skip this exchange
//and we'll pick up the same patient data in a following exchange
if (firstBatch == null) {
continue;
}
UUID versionUuid = firstBatch.getBatchId();
//find suitable batch ID
UUID batchId = null;
if (batchIds != null
&& batchIds.size() > 0) {
batchId = batchIds.get(batchIds.size()-1);
} else {
//create new batch ID if not found
ExchangeBatch exchangeBatch = new ExchangeBatch();
exchangeBatch.setBatchId(UUIDs.timeBased());
exchangeBatch.setExchangeId(exchangeId);
exchangeBatch.setInsertedAt(new Date());
exchangeBatch.setEdsPatientId(edsPatientId);
exchangeBatchRepository.save(exchangeBatch);
batchId = exchangeBatch.getBatchId();
//add to map for next resource
if (batchIds == null) {
batchIds = new ArrayList<>();
}
batchIds.add(batchId);
batchesPerPatient.put(edsPatientId, batchIds);
}
String json = parserPool.composeString(resource);
ResourceHistory resourceHistory = new ResourceHistory();
resourceHistory.setResourceId(resourceId);
resourceHistory.setResourceType(resourceType.toString());
resourceHistory.setVersion(versionUuid);
resourceHistory.setCreatedAt(new Date());
resourceHistory.setServiceId(serviceId);
resourceHistory.setSystemId(systemId);
resourceHistory.setIsDeleted(false);
resourceHistory.setSchemaVersion("0.1");
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
ResourceByExchangeBatch resourceByExchangeBatch = new ResourceByExchangeBatch();
resourceByExchangeBatch.setBatchId(batchId);
resourceByExchangeBatch.setExchangeId(exchangeId);
resourceByExchangeBatch.setResourceType(resourceType.toString());
resourceByExchangeBatch.setResourceId(resourceId);
resourceByExchangeBatch.setVersion(versionUuid);
resourceByExchangeBatch.setIsDeleted(false);
resourceByExchangeBatch.setSchemaVersion("0.1");
resourceByExchangeBatch.setResourceData(json);
resourceRepository.save(resourceHistory);
resourceRepository.save(resourceByExchangeBatch);
batchIdsToPutInProtocolQueue.add(batchId);
}
}
if (!batchIdsToPutInProtocolQueue.isEmpty()) {
exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchIdsToPutInProtocolQueue);
}
}
//update the resource_by_service table (and the resource_by_patient view)
for (ResourceHistory resourceHistory: resourcesFixed.values()) {
UUID latestVersionUpdatedUuid = resourceHistory.getVersion();
ResourceHistory latestVersion = resourceRepository.getCurrentVersion(resourceHistory.getResourceType(), resourceHistory.getResourceId());
UUID latestVersionUuid = latestVersion.getVersion();
//if there have been subsequent updates to the resource, then skip it
if (!latestVersionUuid.equals(latestVersionUpdatedUuid)) {
continue;
}
Resource resource = parserPool.parse(resourceHistory.getResourceData());
ResourceMetadata metadata = MetadataFactory.createMetadata(resource);
UUID patientId = ((PatientCompartment)metadata).getPatientId();
ResourceByService resourceByService = new ResourceByService();
resourceByService.setServiceId(resourceHistory.getServiceId());
resourceByService.setSystemId(resourceHistory.getSystemId());
resourceByService.setResourceType(resourceHistory.getResourceType());
resourceByService.setResourceId(resourceHistory.getResourceId());
resourceByService.setCurrentVersion(resourceHistory.getVersion());
resourceByService.setUpdatedAt(resourceHistory.getCreatedAt());
resourceByService.setPatientId(patientId);
resourceByService.setSchemaVersion(resourceHistory.getSchemaVersion());
resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata));
resourceByService.setResourceData(resourceHistory.getResourceData());
resourceRepository.save(resourceByService);
//call out to our patient search and person matching services
if (resource instanceof Patient) {
PatientLinkHelper.updatePersonId((Patient)resource);
PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (Patient)resource);
} else if (resource instanceof EpisodeOfCare) {
PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (EpisodeOfCare)resource);
}
}
if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) {
//find the config for our protocol queue
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) {
Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
}
}
LOG.info("Finished Fixing Confidential Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixDeletedAppointments(String sharedStoragePath, boolean saveChanges, UUID justThisService) {
LOG.info("Fixing Deleted Appointments using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager();
Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class);
Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class);
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class, dir, version, true, parsers);
//find any deleted patients
List<UUID> deletedPatientUuids = new ArrayList<>();
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getDeleted()) {
//find the EDS patient ID for this local guid
String patientGuid = patientParser.getPatientGuid();
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid);
}
deletedPatientUuids.add(edsPatientId);
}
}
patientParser.close();
//go through the appts file to find properly deleted appt GUIDS
List<UUID> deletedApptUuids = new ArrayList<>();
org.endeavourhealth.transform.emis.csv.schema.appointment.Slot apptParser = (org.endeavourhealth.transform.emis.csv.schema.appointment.Slot) parsers.get(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class);
while (apptParser.nextRecord()) {
if (apptParser.getDeleted()) {
String patientGuid = apptParser.getPatientGuid();
String slotGuid = apptParser.getSlotGuid();
if (!Strings.isNullOrEmpty(patientGuid)) {
String uniqueLocalId = EmisCsvHelper.createUniqueId(patientGuid, slotGuid);
UUID edsApptId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Appointment, uniqueLocalId);
deletedApptUuids.add(edsApptId);
}
}
}
apptParser.close();
for (UUID edsPatientId : deletedPatientUuids) {
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds == null) {
//if there are no batches for this patient, we'll be handling this data in another exchange
continue;
}
for (UUID batchId : batchIds) {
List<ResourceByExchangeBatch> apptWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Appointment.toString());
for (ResourceByExchangeBatch apptWrapper : apptWrappers) {
//ignore non-deleted appts
if (!apptWrapper.getIsDeleted()) {
continue;
}
//if the appt was deleted legitimately, then skip it
UUID apptId = apptWrapper.getResourceId();
if (deletedApptUuids.contains(apptId)) {
continue;
}
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(apptWrapper.getResourceId(), apptWrapper.getResourceType(), apptWrapper.getVersion());
if (saveChanges) {
mapperResourceByExchangeBatch.delete(apptWrapper);
mapperResourceHistory.delete(deletedResourceHistory);
}
LOG.info("Un-deleted " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " in batch " + batchId + " patient " + edsPatientId);
//now get the most recent instance of the appointment, and if it's NOT deleted, insert into the resource_by_service table
ResourceHistory mostRecentResourceHistory = resourceRepository.getCurrentVersion(apptWrapper.getResourceType(), apptWrapper.getResourceId());
if (mostRecentResourceHistory != null
&& !mostRecentResourceHistory.getIsDeleted()) {
Resource resource = parserPool.parse(mostRecentResourceHistory.getResourceData());
ResourceMetadata metadata = MetadataFactory.createMetadata(resource);
UUID patientId = ((PatientCompartment) metadata).getPatientId();
ResourceByService resourceByService = new ResourceByService();
resourceByService.setServiceId(mostRecentResourceHistory.getServiceId());
resourceByService.setSystemId(mostRecentResourceHistory.getSystemId());
resourceByService.setResourceType(mostRecentResourceHistory.getResourceType());
resourceByService.setResourceId(mostRecentResourceHistory.getResourceId());
resourceByService.setCurrentVersion(mostRecentResourceHistory.getVersion());
resourceByService.setUpdatedAt(mostRecentResourceHistory.getCreatedAt());
resourceByService.setPatientId(patientId);
resourceByService.setSchemaVersion(mostRecentResourceHistory.getSchemaVersion());
resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata));
resourceByService.setResourceData(mostRecentResourceHistory.getResourceData());
if (saveChanges) {
resourceRepository.save(resourceByService);
}
LOG.info("Restored " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " to resource_by_service table");
}
}
}
}
}
}
LOG.info("Finished Deleted Appointments Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/**
 * Runs the slot-reference fix (see fixSlotReferences) against every service
 * that is assigned to the given publisher config.
 *
 * @param publisher publisher config name to match against each service
 */
private static void fixSlotReferencesForPublisher(String publisher) {
    try {
        ServiceDalI serviceDal = DalProvider.factoryServiceDal();
        for (Service service : serviceDal.getAll()) {
            String configName = service.getPublisherConfigName();
            //services without a publisher config are skipped
            if (configName != null
                    && configName.equals(publisher)) {
                fixSlotReferences(service.getId());
            }
        }
    } catch (Exception ex) {
        LOG.error("", ex);
    }
}
/**
 * Fixes Appointment resources for a service whose FHIR slot reference does not map
 * back to a valid local source ID. For each non-deleted version of each Appointment,
 * the single slot reference is checked; if its locally-unique ID lacks the ":"
 * separator, the reference is rebuilt from the appointment's own source ID and the
 * resource version is re-saved.
 *
 * @param serviceId service whose appointments should be checked and fixed
 */
private static void fixSlotReferences(UUID serviceId) {
    LOG.info("Fixing Slot References in Appointments for " + serviceId);
    try {
        //get patient IDs from the publisher transform ID map
        List<UUID> patientIds = findPatientIdsInResourceIdMap(serviceId);
        LOG.debug("Found " + patientIds.size() + " patients");

        int done = 0;
        int fixed = 0;

        ResourceDalI resourceDal = DalProvider.factoryResourceDal();
        EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, null, null, null, true);

        //for each patient
        for (UUID patientUuid : patientIds) {

            //get all appointment resources for the patient
            List<ResourceWrapper> appointmentWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.Appointment.toString());
            for (ResourceWrapper apptWrapper : appointmentWrappers) {

                List<ResourceWrapper> historyWrappers = resourceDal.getResourceHistory(serviceId, apptWrapper.getResourceType(), apptWrapper.getResourceId());

                //the above returns most recent first, but we want to do them in order
                historyWrappers = Lists.reverse(historyWrappers);

                for (ResourceWrapper historyWrapper : historyWrappers) {
                    if (historyWrapper.isDeleted()) {
                        continue;
                    }

                    String json = historyWrapper.getResourceData();
                    Appointment appt = (Appointment) FhirSerializationHelper.deserializeResource(json);
                    if (!appt.hasSlot()) {
                        continue;
                    }

                    if (appt.getSlot().size() != 1) {
                        throw new Exception("Appointment " + appt.getId() + " has " + appt.getSlot().size() + " slot refs");
                    }

                    Reference slotRef = appt.getSlot().get(0);

                    //test if slot reference exists - a valid locally-unique slot ID
                    //contains a ":" separator, so if present there is nothing to fix
                    Reference slotLocalRef = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, slotRef);
                    String slotSourceId = ReferenceHelper.getReferenceId(slotLocalRef);
                    if (slotSourceId.indexOf(":") > -1) {
                        continue;
                    }

                    //if not, rebuild the slot reference from the appointment's own source ID
                    Reference apptEdsReference = ReferenceHelper.createReference(appt.getResourceType(), appt.getId());
                    Reference apptLocalReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, apptEdsReference);
                    String sourceId = ReferenceHelper.getReferenceId(apptLocalReference);

                    Reference slotLocalReference = ReferenceHelper.createReference(ResourceType.Slot, sourceId);
                    Reference slotEdsReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(slotLocalReference, csvHelper);
                    String slotEdsReferenceValue = slotEdsReference.getReference();
                    slotRef.setReference(slotEdsReferenceValue);

                    //save the corrected appointment version
                    json = FhirSerializationHelper.serializeResource(appt);
                    historyWrapper.setResourceData(json);
                    saveResourceWrapper(serviceId, historyWrapper);
                    fixed++;
                }
            }

            done++;
            if (done % 1000 == 0) {
                LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts");
            }
        }
        LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts");

        LOG.info("Finished Fixing Slot References in Appointments for " + serviceId);
    } catch (Exception ex) {
        LOG.error("", ex);
    }
}

/**
 * Reads the EDS patient UUIDs mapped for the given service from the
 * publisher_transform_02.resource_id_map table.
 * <p>
 * NOTE: the original inline version was missing a space before "AND" in the SQL
 * (relying on the quote to terminate the token) and leaked the statement/result set
 * on error; both are fixed here with try-with-resources, and the entity manager is
 * now closed when done.
 *
 * @param serviceId service to look up
 * @return EDS patient UUIDs for the service
 * @throws Exception on any database error
 */
private static List<UUID> findPatientIdsInResourceIdMap(UUID serviceId) throws Exception {
    List<UUID> patientIds = new ArrayList<>();

    EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId);
    try {
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection connection = session.connection();

        //serviceId is a UUID (not untrusted text), so inlining it is injection-safe
        String sql = "SELECT eds_id FROM publisher_transform_02.resource_id_map WHERE service_id = '" + serviceId + "' AND resource_type = '" + ResourceType.Patient + "';";

        try (Statement statement = connection.createStatement();
             ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                String patientUuid = rs.getString(1);
                patientIds.add(UUID.fromString(patientUuid));
            }
        }
        connection.close();
    } finally {
        entityManager.close();
    }

    return patientIds;
}
/*private static void fixReviews(String sharedStoragePath, UUID justThisService) {
LOG.info("Fixing Reviews using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
Map<String, Long> problemCodes = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
LOG.info("Doing Emis CSV exchange " + exchangeId + " with " + batches.size() + " batches");
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem problemParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (problemParser.nextRecord()) {
String patientGuid = problemParser.getPatientGuid();
String observationGuid = problemParser.getObservationGuid();
String key = patientGuid + ":" + observationGuid;
if (!problemCodes.containsKey(key)) {
problemCodes.put(key, null);
}
}
problemParser.close();
while (observationParser.nextRecord()) {
String patientGuid = observationParser.getPatientGuid();
String observationGuid = observationParser.getObservationGuid();
String key = patientGuid + ":" + observationGuid;
if (problemCodes.containsKey(key)) {
Long codeId = observationParser.getCodeId();
if (codeId == null) {
continue;
}
problemCodes.put(key, codeId);
}
}
observationParser.close();
LOG.info("Found " + problemCodes.size() + " problem codes so far");
String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f);
EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId);
while (observationParser.nextRecord()) {
String problemGuid = observationParser.getProblemGuid();
if (!Strings.isNullOrEmpty(problemGuid)) {
String patientGuid = observationParser.getPatientGuid();
Long codeId = observationParser.getCodeId();
if (codeId == null) {
continue;
}
String key = patientGuid + ":" + problemGuid;
Long problemCodeId = problemCodes.get(key);
if (problemCodeId == null
|| problemCodeId.longValue() != codeId.longValue()) {
continue;
}
//if here, our code is the same as the problem, so it's a review
String locallyUniqueId = patientGuid + ":" + observationParser.getObservationGuid();
ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, helper);
for (UUID systemId: systemIds) {
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid);
}
UUID edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId);
if (edsObservationId == null) {
//try observations as diagnostic reports, because it could be one of those instead
if (resourceType == ResourceType.Observation) {
resourceType = ResourceType.DiagnosticReport;
edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId);
}
if (edsObservationId == null) {
throw new Exception("Failed to find observation ID for service " + serviceId + " system " + systemId + " resourceType " + resourceType + " local ID " + locallyUniqueId);
}
}
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds == null) {
//if there are no batches for this patient, we'll be handling this data in another exchange
continue;
//throw new Exception("Failed to find batch ID for patient " + edsPatientId + " in exchange " + exchangeId + " for resource " + resourceType + " " + edsObservationId);
}
for (UUID batchId: batchIds) {
List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), edsObservationId);
if (resourceByExchangeBatches.isEmpty()) {
//if we've deleted data, this will be null
continue;
//throw new Exception("No resources found for batch " + batchId + " resource type " + resourceType + " and resource id " + edsObservationId);
}
for (ResourceByExchangeBatch resourceByExchangeBatch: resourceByExchangeBatches) {
String json = resourceByExchangeBatch.getResourceData();
if (Strings.isNullOrEmpty(json)) {
throw new Exception("No JSON in resource " + resourceType + " " + edsObservationId + " in batch " + batchId);
}
Resource resource = parserPool.parse(json);
if (addReviewExtension((DomainResource)resource)) {
json = parserPool.composeString(resource);
resourceByExchangeBatch.setResourceData(json);
LOG.info("Changed " + resourceType + " " + edsObservationId + " to have extension in batch " + batchId);
resourceRepository.save(resourceByExchangeBatch);
UUID versionUuid = resourceByExchangeBatch.getVersion();
ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(edsObservationId, resourceType.toString(), versionUuid);
if (resourceHistory == null) {
throw new Exception("Failed to find resource history for " + resourceType + " " + edsObservationId + " and version " + versionUuid);
}
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
resourceRepository.save(resourceHistory);
ResourceByService resourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType.toString(), edsObservationId);
if (resourceByService != null) {
UUID serviceVersionUuid = resourceByService.getCurrentVersion();
if (serviceVersionUuid.equals(versionUuid)) {
resourceByService.setResourceData(json);
resourceRepository.save(resourceByService);
}
}
} else {
LOG.info("" + resourceType + " " + edsObservationId + " already has extension");
}
}
}
}
//1. find out resource type originally saved from
//2. retrieve from resource_by_exchange_batch
//3. update resource in resource_by_exchange_batch
//4. retrieve from resource_history
//5. update resource_history
//6. retrieve record from resource_by_service
//7. if resource_by_service version UUID matches the resource_history updated, then update that too
}
}
observationParser.close();
}
}
LOG.info("Finished Fixing Reviews");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static boolean addReviewExtension(DomainResource resource) {
if (ExtensionConverter.hasExtension(resource, FhirExtensionUri.IS_REVIEW)) {
return false;
}
Extension extension = ExtensionConverter.createExtension(FhirExtensionUri.IS_REVIEW, new BooleanType(true));
resource.addExtension(extension);
return true;
}*/
/*private static void runProtocolsForConfidentialPatients(String sharedStoragePath, UUID justThisService) {
LOG.info("Running Protocols for Confidential Patients using path " + sharedStoragePath + " and service " + justThisService);
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
//once we match the service, set this to null to do all other services
justThisService = null;
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
List<String> interestingPatientGuids = new ArrayList<>();
Map<UUID, Map<UUID, List<UUID>>> batchesPerPatientPerExchange = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
batchesPerPatientPerExchange.put(exchangeId, batchesPerPatient);
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getIsConfidential() || patientParser.getDeleted()) {
interestingPatientGuids.add(patientParser.getPatientGuid());
}
}
patientParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class);
while (consultationParser.nextRecord()) {
if (consultationParser.getIsConfidential()
&& !consultationParser.getDeleted()) {
interestingPatientGuids.add(consultationParser.getPatientGuid());
}
}
consultationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
if (observationParser.getIsConfidential()
&& !observationParser.getDeleted()) {
interestingPatientGuids.add(observationParser.getPatientGuid());
}
}
observationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class);
while (diaryParser.nextRecord()) {
if (diaryParser.getIsConfidential()
&& !diaryParser.getDeleted()) {
interestingPatientGuids.add(diaryParser.getPatientGuid());
}
}
diaryParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class);
while (drugRecordParser.nextRecord()) {
if (drugRecordParser.getIsConfidential()
&& !drugRecordParser.getDeleted()) {
interestingPatientGuids.add(drugRecordParser.getPatientGuid());
}
}
drugRecordParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class);
while (issueRecordParser.nextRecord()) {
if (issueRecordParser.getIsConfidential()
&& !issueRecordParser.getDeleted()) {
interestingPatientGuids.add(issueRecordParser.getPatientGuid());
}
}
issueRecordParser.close();
}
Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>();
for (String interestingPatientGuid: interestingPatientGuids) {
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, interestingPatientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + interestingPatientGuid);
}
for (UUID exchangeId: batchesPerPatientPerExchange.keySet()) {
Map<UUID, List<UUID>> batchesPerPatient = batchesPerPatientPerExchange.get(exchangeId);
List<UUID> batches = batchesPerPatient.get(edsPatientId);
if (batches != null) {
Set<UUID> batchesForExchange = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
if (batchesForExchange == null) {
batchesForExchange = new HashSet<>();
exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchesForExchange);
}
batchesForExchange.addAll(batches);
}
}
}
if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) {
//find the config for our protocol queue
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) {
Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
}
}
LOG.info("Finished Running Protocols for Confidential Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixOrgs() {
LOG.info("Posting orgs to protocol queue");
String[] orgIds = new String[]{
"332f31a2-7b28-47cb-af6f-18f65440d43d",
"c893d66b-eb89-4657-9f53-94c5867e7ed9"};
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
Map<UUID, Set<UUID>> exchangeBatches = new HashMap<>();
for (String orgId: orgIds) {
LOG.info("Doing org ID " + orgId);
UUID orgUuid = UUID.fromString(orgId);
try {
//select batch_id from ehr.resource_by_exchange_batch where resource_type = 'Organization' and resource_id = 8f465517-729b-4ad9-b405-92b487047f19 LIMIT 1 ALLOW FILTERING;
ResourceByExchangeBatch resourceByExchangeBatch = resourceRepository.getFirstResourceByExchangeBatch(ResourceType.Organization.toString(), orgUuid);
UUID batchId = resourceByExchangeBatch.getBatchId();
//select exchange_id from ehr.exchange_batch where batch_id = 1a940e10-1535-11e7-a29d-a90b99186399 LIMIT 1 ALLOW FILTERING;
ExchangeBatch exchangeBatch = exchangeBatchRepository.retrieveFirstForBatchId(batchId);
UUID exchangeId = exchangeBatch.getExchangeId();
Set<UUID> list = exchangeBatches.get(exchangeId);
if (list == null) {
list = new HashSet<>();
exchangeBatches.put(exchangeId, list);
}
list.add(batchId);
} catch (Exception ex) {
LOG.error("", ex);
break;
}
}
try {
//find the config for our protocol queue (which is in the inbound config)
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatches.keySet()) {
Set<UUID> batchIds = exchangeBatches.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
} catch (Exception ex) {
LOG.error("", ex);
return;
}
LOG.info("Finished posting orgs to protocol queue");
}*/
/*private static void findCodes() {
LOG.info("Finding missing codes");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT service_id, system_id, exchange_id, version FROM audit.exchange_transform_audit ALLOW FILTERING;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID serviceId = row.get(0, UUID.class);
UUID systemId = row.get(1, UUID.class);
UUID exchangeId = row.get(2, UUID.class);
UUID version = row.get(3, UUID.class);
ExchangeTransformAudit audit = auditRepository.getExchangeTransformAudit(serviceId, systemId, exchangeId, version);
String xml = audit.getErrorXml();
if (xml == null) {
continue;
}
String codePrefix = "Failed to find clinical code CodeableConcept for codeId ";
int codeIndex = xml.indexOf(codePrefix);
if (codeIndex > -1) {
int startIndex = codeIndex + codePrefix.length();
int tagEndIndex = xml.indexOf("<", startIndex);
String code = xml.substring(startIndex, tagEndIndex);
Service service = serviceRepository.getById(serviceId);
String name = service.getName();
LOG.info(name + " clinical code " + code + " from " + audit.getStarted());
continue;
}
codePrefix = "Failed to find medication CodeableConcept for codeId ";
codeIndex = xml.indexOf(codePrefix);
if (codeIndex > -1) {
int startIndex = codeIndex + codePrefix.length();
int tagEndIndex = xml.indexOf("<", startIndex);
String code = xml.substring(startIndex, tagEndIndex);
Service service = serviceRepository.getById(serviceId);
String name = service.getName();
LOG.info(name + " drug code " + code + " from " + audit.getStarted());
continue;
}
}
LOG.info("Finished finding missing codes");
}*/
private static void createTppSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating TPP Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createTppSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating TPP Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively filters one directory of a TPP CSV extract down to the given person IDs.
 * <p>
 * Files are filtered on the "IDPatient" column if present, or "RowIdentifier" for
 * SRPatient.csv; files with neither column, and non-CSV files, are copied unchanged.
 * Existing destination files are overwritten.
 *
 * @param sourceDir directory to read from
 * @param destDir   directory to write to (sub-directories are created as required)
 * @param personIds person IDs to retain
 * @throws Exception if reading or writing a file fails
 */
private static void createTppSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {

    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if the path isn't a readable directory
        LOG.warn("No files found in " + sourceDir);
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);

    for (File sourceFile: files) {

        String name = sourceFile.getName();
        File destFile = new File(destDir, name);

        if (sourceFile.isDirectory()) {

            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createTppSubsetForFile(sourceFile, destFile, personIds);

        } else {

            if (destFile.exists()) {
                destFile.delete();
            }

            LOG.info("Checking file " + sourceFile);

            //skip any non-CSV file
            String ext = FilenameUtils.getExtension(name);
            if (!ext.equalsIgnoreCase("csv")) {
                LOG.info("Skipping as not a CSV file");
                continue;
            }

            //TPP extracts are encoded in Windows-1252, not UTF-8
            Charset encoding = Charset.forName("CP1252");
            CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader();

            //try-with-resources so the parser is closed even if an exception is thrown mid-file
            try (InputStreamReader reader = new InputStreamReader(
                        new BufferedInputStream(new FileInputStream(sourceFile)), encoding);
                 CSVParser parser = new CSVParser(reader, format)) {

                //work out which column carries the patient ID
                String filterColumn = null;
                Map<String, Integer> headerMap = parser.getHeaderMap();
                if (headerMap.containsKey("IDPatient")) {
                    filterColumn = "IDPatient";
                } else if (name.equalsIgnoreCase("SRPatient.csv")) {
                    filterColumn = "RowIdentifier";
                } else {
                    //if no patient column, just copy the file
                    LOG.info("Copying non-patient file " + sourceFile);
                    copyFile(sourceFile, destFile);
                    continue;
                }

                //rebuild the header row, in column order, for the output file
                String[] columnHeaders = new String[headerMap.size()];
                for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                    columnHeaders[entry.getValue()] = entry.getKey();
                }

                try (BufferedWriter bw = new BufferedWriter(
                            new OutputStreamWriter(new FileOutputStream(destFile), encoding));
                     CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders))) {

                    for (CSVRecord csvRecord : parser) {
                        String patientId = csvRecord.get(filterColumn);
                        if (personIds.contains(patientId)) {
                            printer.printRecord(csvRecord);
                        }
                    }
                    //close() flushes, so no per-record flush is needed
                }
            }
        }
    }
}
private static void createVisionSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Vision Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createVisionSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating Vision Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively filters one directory of a Vision CSV extract down to the given patient IDs.
 * <p>
 * The Vision files have no header row; the patient ID column is identified positionally
 * from the file name. Files without a known patient column, and non-CSV files, are copied
 * unchanged. Existing destination files are overwritten.
 *
 * @param sourceDir directory to read from
 * @param destDir   directory to write to (sub-directories are created as required)
 * @param personIds patient IDs to retain
 * @throws Exception if reading or writing a file fails
 */
private static void createVisionSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {

    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if the path isn't a readable directory
        LOG.warn("No files found in " + sourceDir);
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);

    for (File sourceFile: files) {

        String name = sourceFile.getName();
        File destFile = new File(destDir, name);

        if (sourceFile.isDirectory()) {

            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createVisionSubsetForFile(sourceFile, destFile, personIds);

        } else {

            if (destFile.exists()) {
                destFile.delete();
            }

            LOG.info("Checking file " + sourceFile);

            //skip any non-CSV file
            String ext = FilenameUtils.getExtension(name);
            if (!ext.equalsIgnoreCase("csv")) {
                LOG.info("Skipping as not a CSV file");
                continue;
            }

            //NOTE(review): reads and writes with the platform default charset — confirm the
            //Vision extracts are compatible with it
            CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL);

            //try-with-resources so the parser is closed even if an exception is thrown mid-file
            try (BufferedReader br = new BufferedReader(new FileReader(sourceFile));
                 CSVParser parser = new CSVParser(br, format)) {

                //no header row, so the patient ID column is found by position from the file name
                int filterColumn = -1;
                if (name.contains("encounter_data") || name.contains("journal_data") ||
                        name.contains("patient_data") || name.contains("referral_data")) {
                    filterColumn = 0;
                } else {
                    //if no patient column, just copy the file
                    LOG.info("Copying non-patient file " + sourceFile);
                    copyFile(sourceFile, destFile);
                    continue;
                }

                try (BufferedWriter bw = new BufferedWriter(new PrintWriter(destFile));
                     CSVPrinter printer = new CSVPrinter(bw, format)) {

                    for (CSVRecord csvRecord : parser) {
                        String patientId = csvRecord.get(filterColumn);
                        if (personIds.contains(patientId)) {
                            printer.printRecord(csvRecord);
                        }
                    }
                    //close() flushes, so no per-record flush is needed
                }
            }
        }
    }
}
private static void createHomertonSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Homerton Subset");
try {
Set<String> PersonIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
PersonIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createHomertonSubsetForFile(sourceDir, destDir, PersonIds);
LOG.info("Finished Creating Homerton Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively filters one directory of a Homerton CSV extract down to the given person IDs.
 * <p>
 * Each known file type carries its PersonId in a fixed column position; files without a
 * known PersonId column (e.g. PROCEDURE), and non-CSV files, are copied unchanged.
 * Existing destination files are overwritten.
 *
 * @param sourceDir directory to read from
 * @param destDir   directory to write to (sub-directories are created as required)
 * @param personIds person IDs to retain
 * @throws Exception if reading or writing a file fails
 */
private static void createHomertonSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {

    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if the path isn't a readable directory
        LOG.warn("No files found in " + sourceDir);
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);

    for (File sourceFile: files) {

        String name = sourceFile.getName();
        File destFile = new File(destDir, name);

        if (sourceFile.isDirectory()) {

            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createHomertonSubsetForFile(sourceFile, destFile, personIds);

        } else {

            if (destFile.exists()) {
                destFile.delete();
            }

            LOG.info("Checking file " + sourceFile);

            //skip any non-CSV file
            String ext = FilenameUtils.getExtension(name);
            if (!ext.equalsIgnoreCase("csv")) {
                LOG.info("Skipping as not a CSV file");
                continue;
            }

            //NOTE(review): the original comment claimed the output is fully quoted, but no
            //quote mode is set here — DEFAULT quoting applies. Also uses the platform charset.
            CSVFormat format = CSVFormat.DEFAULT.withHeader();

            //try-with-resources so the parser is closed even if an exception is thrown mid-file
            try (BufferedReader br = new BufferedReader(new FileReader(sourceFile));
                 CSVParser parser = new CSVParser(br, format)) {

                //the PersonId column position depends on the file type
                int filterColumn = -1;
                if (name.contains("ENCOUNTER") || name.contains("PATIENT")) {
                    //PersonId column at 1
                    filterColumn = 1;
                } else if (name.contains("DIAGNOSIS")) {
                    //PersonId column at 13
                    filterColumn = 13;
                } else if (name.contains("ALLERGY")) {
                    //PersonId column at 2
                    filterColumn = 2;
                } else if (name.contains("PROBLEM")) {
                    //PersonId column at 4
                    filterColumn = 4;
                } else {
                    //if no patient column, just copy the file (i.e. PROCEDURE)
                    LOG.info("Copying file without PatientId " + sourceFile);
                    copyFile(sourceFile, destFile);
                    continue;
                }

                //rebuild the header row, in column order, for the output file
                Map<String, Integer> headerMap = parser.getHeaderMap();
                String[] columnHeaders = new String[headerMap.size()];
                for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                    columnHeaders[entry.getValue()] = entry.getKey();
                }

                try (BufferedWriter bw = new BufferedWriter(new PrintWriter(destFile));
                     CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders))) {

                    for (CSVRecord csvRecord : parser) {
                        String patientId = csvRecord.get(filterColumn);
                        if (personIds.contains(patientId)) {
                            printer.printRecord(csvRecord);
                        }
                    }
                    //close() flushes, so no per-record flush is needed
                }
            }
        }
    }
}
private static void createAdastraSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Adastra Subset");
try {
Set<String> caseIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line: lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
//adastra extract files are all keyed on caseId
caseIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createAdastraSubsetForFile(sourceDir, destDir, caseIds);
LOG.info("Finished Creating Adastra Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively filters one directory of an Adastra pipe-delimited extract down to the given
 * case IDs.
 * <p>
 * Each known file type carries its CaseRef in a fixed column position; files without a
 * known CaseRef column, and non-CSV files, are copied unchanged. Existing destination
 * files are overwritten.
 *
 * @param sourceDir directory to read from
 * @param destDir   directory to write to (sub-directories are created as required)
 * @param caseIds   case IDs to retain
 * @throws Exception if reading or writing a file fails
 */
private static void createAdastraSubsetForFile(File sourceDir, File destDir, Set<String> caseIds) throws Exception {

    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if the path isn't a readable directory
        LOG.warn("No files found in " + sourceDir);
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);

    for (File sourceFile: files) {

        String name = sourceFile.getName();
        File destFile = new File(destDir, name);

        if (sourceFile.isDirectory()) {

            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createAdastraSubsetForFile(sourceFile, destFile, caseIds);

        } else {

            if (destFile.exists()) {
                destFile.delete();
            }

            LOG.info("Checking file " + sourceFile);

            //skip any non-CSV file
            String ext = FilenameUtils.getExtension(name);
            if (!ext.equalsIgnoreCase("csv")) {
                LOG.info("Skipping as not a CSV file");
                continue;
            }

            //Adastra files are pipe-delimited with no header row. NOTE(review): the original
            //comment claimed full quoting, but no quote mode is set; uses the platform charset.
            CSVFormat format = CSVFormat.DEFAULT.withDelimiter('|');

            //try-with-resources so the parser is closed even if an exception is thrown mid-file
            try (BufferedReader br = new BufferedReader(new FileReader(sourceFile));
                 CSVParser parser = new CSVParser(br, format)) {

                //the CaseRef column position depends on the file type; note "CASEQUESTIONS"
                //must be matched before the generic "CASE" check
                int filterColumn = -1;
                if (name.contains("NOTES") || name.contains("CASEQUESTIONS") ||
                        name.contains("OUTCOMES") || name.contains("CONSULTATION") ||
                        name.contains("CLINICALCODES") || name.contains("PRESCRIPTIONS") ||
                        name.contains("PATIENT")) {
                    //CaseRef column at 0
                    filterColumn = 0;
                } else if (name.contains("CASE")) {
                    //CaseRef column at 2
                    filterColumn = 2;
                } else if (name.contains("PROVIDER")) {
                    //CaseRef column at 7
                    filterColumn = 7;
                } else {
                    //if no patient column, just copy the file
                    LOG.info("Copying non-patient file " + sourceFile);
                    copyFile(sourceFile, destFile);
                    continue;
                }

                try (BufferedWriter bw = new BufferedWriter(new PrintWriter(destFile));
                     CSVPrinter printer = new CSVPrinter(bw, format)) {

                    for (CSVRecord csvRecord : parser) {
                        String caseId = csvRecord.get(filterColumn);
                        if (caseIds.contains(caseId)) {
                            printer.printRecord(csvRecord);
                        }
                    }
                    //close() flushes, so no per-record flush is needed
                }
            }
        }
    }
}
/**
 * Exports every row of the service's resource_current table to tab-separated files, one
 * file per resource type (e.g. Patient.tsv), each with columns resource_id and
 * resource_json. Any error is logged rather than thrown.
 *
 * @param serviceId       service whose EHR database to export from
 * @param destinationPath directory to write the .tsv files to (created if missing)
 */
private static void exportFhirToCsv(UUID serviceId, String destinationPath) {

    try {
        File dir = new File(destinationPath);

        //bug fix: the original called mkdirs() only when the directory ALREADY existed,
        //so a missing destination directory was never created
        if (!dir.exists()) {
            dir.mkdirs();
        }

        Map<String, CSVPrinter> hmPrinters = new HashMap<>();

        EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId);
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection connection = session.connection();

        PreparedStatement ps = connection.prepareStatement("SELECT resource_id, resource_type, resource_data FROM resource_current");

        LOG.debug("Running query");
        ResultSet rs = ps.executeQuery();
        LOG.debug("Got result set");

        while (rs.next()) {
            String id = rs.getString(1);
            String type = rs.getString(2);
            String json = rs.getString(3);

            //lazily open one printer per resource type
            CSVPrinter printer = hmPrinters.get(type);
            if (printer == null) {
                String path = FilenameUtils.concat(dir.getAbsolutePath(), type + ".tsv");
                FileWriter fileWriter = new FileWriter(new File(path));
                BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);

                //tab-separated with quoting and escaping disabled, since the JSON column
                //contains quote characters that must be written through verbatim
                CSVFormat format = CSVFormat.DEFAULT
                        .withHeader("resource_id", "resource_json")
                        .withDelimiter('\t')
                        .withEscape((Character) null)
                        .withQuote((Character) null)
                        .withQuoteMode(QuoteMode.MINIMAL);

                printer = new CSVPrinter(bufferedWriter, format);
                hmPrinters.put(type, printer);
            }

            printer.printRecord(id, json);
        }

        for (CSVPrinter printer : hmPrinters.values()) {
            printer.flush();
            printer.close();
        }

        //close the result set as well as the statement
        rs.close();
        ps.close();

        entityManager.close();

    } catch (Throwable t) {
        LOG.error("", t);
    }
}
}
/*class ResourceFiler extends FhirResourceFiler {
public ResourceFiler(UUID exchangeId, UUID serviceId, UUID systemId, TransformError transformError,
List<UUID> batchIdsCreated, int maxFilingThreads) {
super(exchangeId, serviceId, systemId, transformError, batchIdsCreated, maxFilingThreads);
}
private List<Resource> newResources = new ArrayList<>();
public List<Resource> getNewResources() {
return newResources;
}
@Override
public void saveAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling saveAdminResource");
}
@Override
public void deleteAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling deleteAdminResource");
}
@Override
public void savePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception {
for (Resource resource: resources) {
if (mapIds) {
IdHelper.mapIds(getServiceId(), getSystemId(), resource);
}
newResources.add(resource);
}
}
@Override
public void deletePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling deletePatientResource");
}
}*/
|
package org.endeavourhealth.queuereader;
import OpenPseudonymiser.Crypto;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.rabbitmq.client.*;
import org.apache.commons.csv.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.endeavourhealth.common.cache.ObjectMapperPool;
import org.endeavourhealth.common.config.ConfigManager;
import org.endeavourhealth.common.fhir.PeriodHelper;
import org.endeavourhealth.common.fhir.ReferenceHelper;
import org.endeavourhealth.common.utility.*;
import org.endeavourhealth.core.configuration.ConfigDeserialiser;
import org.endeavourhealth.core.configuration.PostMessageToExchangeConfig;
import org.endeavourhealth.core.configuration.QueueReaderConfiguration;
import org.endeavourhealth.core.csv.CsvHelper;
import org.endeavourhealth.core.database.dal.DalProvider;
import org.endeavourhealth.core.database.dal.admin.LibraryRepositoryHelper;
import org.endeavourhealth.core.database.dal.admin.ServiceDalI;
import org.endeavourhealth.core.database.dal.admin.models.Service;
import org.endeavourhealth.core.database.dal.audit.ExchangeBatchDalI;
import org.endeavourhealth.core.database.dal.audit.ExchangeDalI;
import org.endeavourhealth.core.database.dal.audit.models.Exchange;
import org.endeavourhealth.core.database.dal.audit.models.ExchangeBatch;
import org.endeavourhealth.core.database.dal.audit.models.ExchangeTransformAudit;
import org.endeavourhealth.core.database.dal.audit.models.HeaderKeys;
import org.endeavourhealth.core.database.dal.eds.PatientLinkDalI;
import org.endeavourhealth.core.database.dal.ehr.ResourceDalI;
import org.endeavourhealth.core.database.dal.ehr.models.ResourceWrapper;
import org.endeavourhealth.core.database.dal.publisherTransform.InternalIdDalI;
import org.endeavourhealth.core.database.dal.reference.PostcodeDalI;
import org.endeavourhealth.core.database.dal.reference.models.PostcodeLookup;
import org.endeavourhealth.core.database.dal.subscriberTransform.EnterpriseIdDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.models.EnterpriseAge;
import org.endeavourhealth.core.database.rdbms.ConnectionManager;
import org.endeavourhealth.core.database.rdbms.enterprise.EnterpriseConnector;
import org.endeavourhealth.core.exceptions.TransformException;
import org.endeavourhealth.core.fhirStorage.FhirSerializationHelper;
import org.endeavourhealth.core.fhirStorage.FhirStorageService;
import org.endeavourhealth.core.fhirStorage.JsonServiceInterfaceEndpoint;
import org.endeavourhealth.core.messaging.pipeline.components.OpenEnvelope;
import org.endeavourhealth.core.messaging.pipeline.components.PostMessageToExchange;
import org.endeavourhealth.core.queueing.QueueHelper;
import org.endeavourhealth.core.xml.QueryDocument.*;
import org.endeavourhealth.core.xml.TransformErrorSerializer;
import org.endeavourhealth.core.xml.TransformErrorUtility;
import org.endeavourhealth.core.xml.transformError.TransformError;
import org.endeavourhealth.transform.barts.schema.PPALI;
import org.endeavourhealth.transform.barts.schema.PPATI;
import org.endeavourhealth.transform.common.*;
import org.endeavourhealth.transform.common.resourceBuilders.GenericBuilder;
import org.endeavourhealth.transform.emis.EmisCsvToFhirTransformer;
import org.endeavourhealth.transform.emis.csv.helpers.EmisCsvHelper;
import org.endeavourhealth.transform.emis.csv.schema.appointment.Slot;
import org.endeavourhealth.transform.emis.csv.transforms.appointment.SessionUserTransformer;
import org.endeavourhealth.transform.emis.csv.transforms.appointment.SlotTransformer;
import org.hibernate.internal.SessionImpl;
import org.hl7.fhir.instance.model.*;
import org.hl7.fhir.instance.model.Resource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.persistence.EntityManager;
import java.io.*;
import java.lang.System;
import java.lang.reflect.Constructor;
import java.net.InetAddress;
import java.net.Socket;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.sql.Connection;
import java.sql.*;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Date;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicInteger;
public class Main {
private static final Logger LOG = LoggerFactory.getLogger(Main.class);
public static void main(String[] args) throws Exception {
String configId = args[0];
LOG.info("Initialising config manager");
ConfigManager.initialize("queuereader", configId);
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEncounters")) {
String table = args[1];
fixEncounters(table);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestRabbit")) {
String nodes = args[1];
String username = args[2];
String password = args[3];
String exchangeName = args[4];
String queueName = args[5];
String sslProtocol = null;
if (args.length > 6) {
sslProtocol = args[6];
}
testRabbit(nodes, username, password, sslProtocol, exchangeName, queueName);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisEpisodes1")) {
String odsCode = args[1];
//fixEmisEpisodes1(odsCode);
fixEmisEpisodes2(odsCode);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateHomertonSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createHomertonSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateAdastraSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createAdastraSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateVisionSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createVisionSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateTppSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createTppSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateBartsSubset")) {
String sourceDirPath = args[1];
UUID serviceUuid = UUID.fromString(args[2]);
UUID systemUuid = UUID.fromString(args[3]);
String samplePatientsFile = args[4];
createBartsSubset(sourceDirPath, serviceUuid, systemUuid, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateEmisSubset")) {
String sourceDirPath = args[1];
String destDirPath = args[2];
String samplePatientsFile = args[3];
createEmisSubset(sourceDirPath, destDirPath, samplePatientsFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FindBartsPersonIds")) {
String sourceFile = args[1];
UUID serviceUuid = UUID.fromString(args[2]);
UUID systemUuid = UUID.fromString(args[3]);
String dateCutoffStr = args[4];
String dstFile = args[5];
findBartsPersonIds(sourceFile, serviceUuid, systemUuid, dateCutoffStr, dstFile);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixTPPNullOrgs")) {
String sourceDirPath = args[1];
String orgODS = args[2];
LOG.info("Fixing TPP Null Organisations");
fixTPPNullOrgs(sourceDirPath, orgODS);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisDeletedPatients")) {
String odsCode = args[1];
fixEmisDeletedPatients(odsCode);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestMetrics")) {
testMetrics();
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("TestGraphiteMetrics")) {
String host = args[1];
String port = args[2];
testGraphiteMetrics(host, port);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsOrgs")) {
String serviceId = args[1];
fixBartsOrgs(serviceId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestPreparedStatements")) {
String url = args[1];
String user = args[2];
String pass = args[3];
String serviceId = args[4];
testPreparedStatements(url, user, pass, serviceId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateTransformMap")) {
UUID serviceId = UUID.fromString(args[1]);
String table = args[2];
String dstFile = args[3];
createTransforMap(serviceId, table, dstFile);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("ExportFhirToCsv")) {
UUID serviceId = UUID.fromString(args[1]);
String path = args[2];
exportFhirToCsv(serviceId, path);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestBatchInserts")) {
String url = args[1];
String user = args[2];
String pass = args[3];
String num = args[4];
String batchSize = args[5];
testBatchInserts(url, user, pass, num, batchSize);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ApplyEmisAdminCaches")) {
applyEmisAdminCaches();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixSubscribers")) {
fixSubscriberDbs();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems")) {
String serviceId = args[1];
String systemId = args[2];
fixEmisProblems(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestS3Read")) {
String s3Bucket = args[1];
String s3Key = args[2];
String start = args[3];
String len = args[4];
testS3Read(s3Bucket, s3Key, start, len);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems3ForPublisher")) {
String publisherId = args[1];
String systemId = args[2];
fixEmisProblems3ForPublisher(publisherId, UUID.fromString(systemId));
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisProblems3")) {
String serviceId = args[1];
String systemId = args[2];
fixEmisProblems3(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("CheckDeletedObs")) {
String serviceId = args[1];
String systemId = args[2];
checkDeletedObs(UUID.fromString(serviceId), UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPersonsNoNhsNumber")) {
fixPersonsNoNhsNumber();
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateSubscriberUprnTable")) {
String subscriberConfigName = args[1];
populateSubscriberUprnTable(subscriberConfigName);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertEmisGuid")) {
convertEmisGuids();
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToRabbit")) {
String exchangeName = args[1];
String srcFile = args[2];
Integer throttle = null;
if (args.length > 3) {
throttle = Integer.parseInt(args[3]);
}
postToRabbit(exchangeName, srcFile, throttle);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToProtocol")) {
String srcFile = args[1];
postToProtocol(srcFile);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsPatients")) {
UUID serviceId = UUID.fromString(args[1]);
fixBartsPatients(serviceId);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixDeceasedPatients")) {
String subscriberConfig = args[1];
fixDeceasedPatients(subscriberConfig);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPseudoIds")) {
String subscriberConfig = args[1];
int threads = Integer.parseInt(args[2]);
fixPseudoIds(subscriberConfig, threads);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("MoveS3ToAudit")) {
int threads = Integer.parseInt(args[1]);
moveS3ToAudit(threads);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertFhirAudit")) {
UUID serviceId = UUID.fromString(args[1]);
int threads = Integer.parseInt(args[2]);
convertFhirAudit(serviceId, threads);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ConvertExchangeBody")) {
String systemId = args[1];
convertExchangeBody(UUID.fromString(systemId));
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixReferrals")) {
fixReferralRequests();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateNewSearchTable")) {
String table = args[1];
populateNewSearchTable(table);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixBartsEscapes")) {
String filePath = args[1];
fixBartsEscapedFiles(filePath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToInbound")) {
String serviceId = args[1];
String systemId = args[2];
String filePath = args[3];
postToInboundFromFile(UUID.fromString(serviceId), UUID.fromString(systemId), filePath);
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixDisabledExtract")) {
String sharedStoragePath = args[1];
String tempDir = args[2];
String systemId = args[3];
String serviceOdsCode = args[4];
fixDisabledEmisExtract(serviceOdsCode, systemId, sharedStoragePath, tempDir);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixEmisMissingSlots")) {
String serviceOdsCode = args[1];
fixEmisMissingSlots(serviceOdsCode);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateLastDataDate")) {
int threads = Integer.parseInt(args[1]);
int batchSize = Integer.parseInt(args[2]);
populateLastDataDate(threads, batchSize);
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestSlack")) {
testSlack();
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PostToInbound")) {
String serviceId = args[1];
boolean all = Boolean.parseBoolean(args[2]);
postToInbound(UUID.fromString(serviceId), all);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixPatientSearch")) {
String serviceId = args[1];
String systemId = null;
if (args.length > 2) {
systemId = args[2];
}
if (serviceId.equalsIgnoreCase("All")) {
fixPatientSearchAllServices(systemId);
} else {
fixPatientSearch(serviceId, systemId);
}
System.exit(0);
}*/
if (args.length >= 1
&& args[0].equalsIgnoreCase("FixSlotReferences")) {
String serviceId = args[1];
try {
UUID serviceUuid = UUID.fromString(serviceId);
fixSlotReferences(serviceUuid);
} catch (Exception ex) {
fixSlotReferencesForPublisher(serviceId);
}
System.exit(0);
}
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("TestS3VsMySQL")) {
UUID serviceUuid = UUID.fromString(args[1]);
int count = Integer.parseInt(args[2]);
int sqlBatchSize = Integer.parseInt(args[3]);
String bucketName = args[4];
testS3VsMySql(serviceUuid, count, sqlBatchSize, bucketName);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("Exit")) {
String exitCode = args[1];
LOG.info("Exiting with error code " + exitCode);
int exitCodeInt = Integer.parseInt(exitCode);
System.exit(exitCodeInt);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("RunSql")) {
String host = args[1];
String username = args[2];
String password = args[3];
String sqlFile = args[4];
runSql(host, username, password, sqlFile);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("PopulateProtocolQueue")) {
String serviceId = null;
if (args.length > 1) {
serviceId = args[1];
}
String startingExchangeId = null;
if (args.length > 2) {
startingExchangeId = args[2];
}
populateProtocolQueue(serviceId, startingExchangeId);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindEncounterTerms")) {
String path = args[1];
String outputPath = args[2];
findEncounterTerms(path, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FindEmisStartDates")) {
String path = args[1];
String outputPath = args[2];
findEmisStartDates(path, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("ExportHl7Encounters")) {
String sourceCsvPpath = args[1];
String outputPath = args[2];
exportHl7Encounters(sourceCsvPpath, outputPath);
System.exit(0);
}*/
/*if (args.length >= 1
&& args[0].equalsIgnoreCase("FixExchangeBatches")) {
fixExchangeBatches();
System.exit(0);
}*/
/*if (args.length >= 0
&& args[0].equalsIgnoreCase("FindCodes")) {
findCodes();
System.exit(0);
}*/
/*if (args.length >= 0
&& args[0].equalsIgnoreCase("FindDeletedOrgs")) {
findDeletedOrgs();
System.exit(0);
}*/
if (args.length >= 0
&& args[0].equalsIgnoreCase("LoadEmisData")) {
String serviceId = args[1];
String systemId = args[2];
String dbUrl = args[3];
String dbUsername = args[4];
String dbPassword = args[5];
String onlyThisFileType = null;
if (args.length > 6) {
onlyThisFileType = args[6];
}
loadEmisData(serviceId, systemId, dbUrl, dbUsername, dbPassword, onlyThisFileType);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateEmisDataTables")) {
createEmisDataTables();
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("LoadBartsData")) {
String serviceId = args[1];
String systemId = args[2];
String dbUrl = args[3];
String dbUsername = args[4];
String dbPassword = args[5];
String startDate = args[6];
String onlyThisFileType = null;
if (args.length > 7) {
onlyThisFileType = args[7];
}
loadBartsData(serviceId, systemId, dbUrl, dbUsername, dbPassword, startDate, onlyThisFileType);
System.exit(0);
}
if (args.length >= 1
&& args[0].equalsIgnoreCase("CreateBartsDataTables")) {
createBartsDataTables();
System.exit(0);
}
if (args.length != 1) {
LOG.error("Usage: queuereader config_id");
return;
}
LOG.info("
LOG.info("EDS Queue Reader " + configId);
LOG.info("
LOG.info("Fetching queuereader configuration");
String configXml = ConfigManager.getConfiguration(configId);
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
/*LOG.info("Registering shutdown hook");
registerShutdownHook();*/
// Instantiate rabbit handler
LOG.info("Creating EDS queue reader");
RabbitHandler rabbitHandler = new RabbitHandler(configuration, configId);
// Begin consume
rabbitHandler.start();
LOG.info("EDS Queue reader running (kill file location " + TransformConfig.instance().getKillFileLocation() + ")");
}
/**
 * Soak-tests the MetricsHelper API: endlessly records a random duration value and a
 * random response-code event, pausing up to 10 seconds between iterations.
 * Never returns normally; intended to be killed manually. Any failure is logged.
 */
private static void testMetrics() {
    LOG.info("Testing Metrics");
    try {
        Random r = new Random(System.currentTimeMillis());
        while (true) {
            String metric1 = "frailty-api.ms-duration";
            //Integer.valueOf(..) rather than the deprecated new Integer(..) constructor
            Integer value1 = Integer.valueOf(r.nextInt(1000));
            MetricsHelper.recordValue(metric1, value1);

            if (r.nextBoolean()) {
                MetricsHelper.recordEvent("frailty-api.response-code-200");
            } else {
                MetricsHelper.recordEvent("frailty-api.response-code-400");
            }

            int sleep = r.nextInt(10 * 1000);
            LOG.debug("Waiting {} ms", sleep);
            Thread.sleep(sleep);

            //example live metric names this mirrors:
            //N3-MessagingAPI-01.messaging-api.frailty-api.duration-ms
            //N3-MessagingAPI-01.messaging-api.frailty-api.response-code
        }
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Soak-tests direct Graphite submission: endlessly sends a random duration metric and a
 * random response-code metric (named {@code hostname.appId.frailty-api.*}) to the given
 * Graphite host/port, pausing up to 10 seconds between sends.
 * Never returns normally; intended to be killed manually. Any failure is logged.
 *
 * @param host Graphite server hostname
 * @param port Graphite plaintext port, as a string
 */
private static void testGraphiteMetrics(String host, String port) {
    LOG.info("Testing Graphite metrics to " + host + " " + port);
    try {
        InetAddress ip = InetAddress.getLocalHost();
        String hostname = ip.getHostName();
        LOG.debug("Hostname = " + hostname);

        String appId = ConfigManager.getAppId();
        LOG.debug("AppID = " + appId);

        Random r = new Random(System.currentTimeMillis());
        while (true) {
            Map<String, Object> metrics = new HashMap<>();

            String metric1 = hostname + "." + appId + ".frailty-api.duration-ms";
            //Integer.valueOf(..) rather than the deprecated new Integer(..) constructor
            Integer value1 = Integer.valueOf(r.nextInt(1000));
            metrics.put(metric1, value1);

            String metric2 = hostname + "." + appId + ".frailty-api.response-code";
            Integer value2 = r.nextBoolean() ? Integer.valueOf(200) : Integer.valueOf(400);
            metrics.put(metric2, value2);

            //Graphite timestamps are seconds since the epoch
            long timestamp = System.currentTimeMillis() / 1000;

            LOG.debug("Sending metrics");
            sendMetrics(host, Integer.parseInt(port), metrics, timestamp);

            int sleep = r.nextInt(10 * 1000);
            LOG.debug("Waiting {} ms", sleep);
            Thread.sleep(sleep);

            //example live metric names this mirrors:
            //N3-MessagingAPI-01.messaging-api.frailty-api.duration-ms
            //N3-MessagingAPI-01.messaging-api.frailty-api.response-code
        }
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Writes metrics to a Graphite server using the plaintext protocol, one
 * {@code "<path> <value> <timestamp>"} line per metric. Only Integer and Float
 * metric values are supported.
 *
 * @param graphiteHost Graphite server hostname
 * @param graphitePort Graphite plaintext port
 * @param metrics      map of metric path to Integer or Float value
 * @param timeStamp    epoch timestamp in seconds to record against each metric
 * @throws Exception if the socket cannot be opened, or a RuntimeException for an unsupported value type
 */
private static void sendMetrics(String graphiteHost, int graphitePort, Map<String, Object> metrics, long timeStamp) throws Exception {
    //try-with-resources ensures the socket and writer are closed even when an
    //unsupported metric type makes us throw part-way through the loop (the
    //original leaked both in that case)
    try (Socket socket = new Socket(graphiteHost, graphitePort);
         PrintWriter out = new PrintWriter(socket.getOutputStream(), true)) {

        for (Map.Entry<String, Object> metric : metrics.entrySet()) {
            if (metric.getValue() instanceof Integer) {
                out.printf("%s %d %d%n", metric.getKey(), ((Integer) metric.getValue()).intValue(), timeStamp);
            } else if (metric.getValue() instanceof Float) {
                out.printf("%s %f %d%n", metric.getKey(), ((Float) metric.getValue()).floatValue(), timeStamp);
            } else {
                throw new RuntimeException("Unsupported type " + metric.getValue().getClass());
            }
        }
    }
}
private static void fixEmisDeletedPatients(String odsCode) {
LOG.info("Fixing Emis Deleted Patients for " + odsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(odsCode);
LOG.info("Service " + service.getId() + " -> " + service.getName());
List<UUID> systemIds = findSystemIds(service);
if (systemIds.size() != 1) {
throw new Exception("Found " + systemIds.size() + " for service");
}
UUID systemId = systemIds.get(0);
UUID serviceId = service.getId();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
LOG.info("Found " + exchanges.size() + " exchanges");
Set<String> hsPatientGuidsDeductedDeceased = new HashSet<>();
Map<String, List<UUID>> hmPatientGuidsDeleted = new HashMap<>();
Map<String, List<String>> hmPatientGuidsToFix = new HashMap<>();
//exchanges are in REVERSE order (most recent first)
for (int i=exchanges.size()-1; i>=0; i
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
//skip exchanges that are for custom extracts
if (files.size() <= 1) {
continue;
}
//skip if we're ignoring old data
boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files);
if (!processPatientData) {
continue;
}
//find patient file
ExchangePayloadFile patientFile = findFileOfType(files, "Admin_Patient");
if (patientFile == null) {
throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId());
}
ExchangePayloadFile agreementFile = findFileOfType(files, "Agreements_SharingOrganisation");
if (agreementFile == null) {
throw new Exception("Failed to find Agreements_SharingOrganisation file in exchange " + exchange.getId());
}
//work out file version
List<ExchangePayloadFile> filesTmp = new ArrayList<>();
filesTmp.add(patientFile);
filesTmp.add(agreementFile);
String version = EmisCsvToFhirTransformer.determineVersion(filesTmp);
//see if sharing agreement is disabled
String path = agreementFile.getPath();
org.endeavourhealth.transform.emis.csv.schema.agreements.SharingOrganisation agreementParser = new org.endeavourhealth.transform.emis.csv.schema.agreements.SharingOrganisation(serviceId, systemId, exchange.getId(), version, path);
agreementParser.nextRecord();
CsvCell disabled = agreementParser.getDisabled();
boolean isDisabled = disabled.getBoolean();
//create the parser
path = patientFile.getPath();
org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path);
while (parser.nextRecord()) {
CsvCell patientGuidCell = parser.getPatientGuid();
String patientGuid = patientGuidCell.getString();
CsvCell dateOfDeathCell = parser.getDateOfDeath();
CsvCell dateOfDeductionCell = parser.getDateOfDeactivation();
CsvCell deletedCell = parser.getDeleted();
if (deletedCell.getBoolean()) {
List<UUID> exchangesDeleted = hmPatientGuidsDeleted.get(patientGuid);
if (exchangesDeleted == null) {
exchangesDeleted = new ArrayList<>();
hmPatientGuidsDeleted.put(patientGuid, exchangesDeleted);
}
exchangesDeleted.add(exchange.getId());
//if this patient was previously updated with a deduction date or date of death, and the sharing
//agreement isn't disabled, then we will have deleted them and need to undelete
if (hsPatientGuidsDeductedDeceased.contains(patientGuid)
&& !isDisabled) {
List<String> exchangesToFix = hmPatientGuidsToFix.get(patientGuid);
if (exchangesToFix == null) {
exchangesToFix = new ArrayList<>();
hmPatientGuidsToFix.put(patientGuid, exchangesToFix);
}
exchangesToFix.add(exchange.getId().toString() + ": Deducted/Dead and Deleted after");
}
} else {
//if the date of death of deduction is set then we need to track this
//because we're going to possibly get a delete in a years time
if (!dateOfDeathCell.isEmpty() || !dateOfDeductionCell.isEmpty()) {
hsPatientGuidsDeductedDeceased.add(patientGuid);
} else {
hsPatientGuidsDeductedDeceased.remove(patientGuid);
}
//if this patient was previously deleted and is now UN-deleted, then we'll
//need to fix the record
if (hmPatientGuidsDeleted.containsKey(patientGuid)) {
List<UUID> exchangesDeleted = hmPatientGuidsDeleted.remove(patientGuid);
List<String> exchangesToFix = hmPatientGuidsToFix.get(patientGuid);
if (exchangesToFix == null) {
exchangesToFix = new ArrayList<>();
hmPatientGuidsToFix.put(patientGuid, exchangesToFix);
}
for (UUID exchangeId: exchangesDeleted) {
exchangesToFix.add(exchangeId.toString() + ": Deleted and subsequently undeleted");
}
}
}
}
parser.close();
}
LOG.info("Finished checking for affected patients - found " + hmPatientGuidsToFix.size() + " patients to fix");
for (String patientGuid: hmPatientGuidsToFix.keySet()) {
List<String> exchangeIds = hmPatientGuidsToFix.get(patientGuid);
LOG.info("Patient " + patientGuid);
for (String exchangeId: exchangeIds) {
LOG.info(" Exchange Id " + exchangeId);
}
//log out the UUID for the patient too
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, null, null, false, null);
Reference ref = ReferenceHelper.createReference(ResourceType.Patient, patientGuid);
ref = IdHelper.convertLocallyUniqueReferenceToEdsReference(ref, csvHelper);
LOG.debug(" Patient UUID " + ref.getReference());
String patientUuidStr = ReferenceHelper.getReferenceId(ref);
UUID patientUuid = UUID.fromString(patientUuidStr);
Set<UUID> hsExchangeIdsDone = new HashSet<>();
for (String exchangeId: exchangeIds) {
UUID exchangeUuid = UUID.fromString(exchangeId.split(":")[0]);
//in some cases, the same exchange was found twice
if (hsExchangeIdsDone.contains(exchangeUuid)) {
continue;
}
hsExchangeIdsDone.add(exchangeUuid);
Exchange exchange = exchangeDal.getExchange(exchangeUuid);
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
List<UUID> batchIdsCreated = new ArrayList<>();
TransformError transformError = new TransformError();
FhirResourceFiler filer = new FhirResourceFiler(exchangeUuid, serviceId, systemId, transformError, batchIdsCreated);
//get all exchange batches for our patient
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeUuid);
for (ExchangeBatch batch: batches) {
UUID batchPatient = batch.getEdsPatientId();
if (batchPatient == null || !batchPatient.equals(patientUuid)) {
continue;
}
//get all resources for this batch
List<ResourceWrapper> resourceWrappers = resourceDal.getResourcesForBatch(serviceId, batch.getBatchId());
//restore each resource
for (ResourceWrapper resourceWrapper: resourceWrappers) {
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceWrapper.getResourceType(), resourceWrapper.getResourceId());
//most recent is first
ResourceWrapper mostRecent = history.get(0);
if (!mostRecent.isDeleted()) {
continue;
}
//find latest non-deleted version and save it over the deleted version
for (ResourceWrapper historyItem: history) {
if (!historyItem.isDeleted()) {
Resource resource = FhirSerializationHelper.deserializeResource(historyItem.getResourceData());
GenericBuilder builder = new GenericBuilder(resource);
filer.savePatientResource(null, false, builder);
break;
}
}
}
}
filer.waitToFinish();
//set new batch ID in exchange header
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
//post new batch to protocol Q
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
LOG.info("Finished Fixing Emis Deleted Patients for " + odsCode);
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Returns the first payload file in the list whose type equals the given type,
 * or {@code null} when no such file is present.
 *
 * @param files    payload files to search
 * @param fileType exact file type to look for
 * @return the first matching file, or {@code null} if none matches
 */
private static ExchangePayloadFile findFileOfType(List<ExchangePayloadFile> files, String fileType) {
    for (int index = 0; index < files.size(); index++) {
        ExchangePayloadFile candidate = files.get(index);
        if (!candidate.getType().equals(fileType)) {
            continue;
        }
        return candidate;
    }
    return null;
}
private static void fixEmisEpisodes2(String odsCode) {
LOG.info("Fixing Emis Episodes (2) for " + odsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(odsCode);
LOG.info("Service " + service.getId() + " -> " + service.getName());
List<UUID> systemIds = findSystemIds(service);
if (systemIds.size() != 1) {
throw new Exception("Found " + systemIds.size() + " for service");
}
UUID systemId = systemIds.get(0);
UUID serviceId = service.getId();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
LOG.info("Found " + exchanges.size() + " exchanges");
InternalIdDalI internalIdDal = DalProvider.factoryInternalIdDal();
Set<String> patientGuidsDone = new HashSet<>();
//exchanges are in REVERSE order (most recent first)
for (int i=exchanges.size()-1; i>=0; i
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
//skip exchanges that are for custom extracts
if (files.size() <= 1) {
continue;
}
//skip if we're ignoring old data
boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files);
if (!processPatientData) {
continue;
}
//find patient file
ExchangePayloadFile patientFile = null;
for (ExchangePayloadFile file: files) {
if (file.getType().equals("Admin_Patient")) {
patientFile = file;
break;
}
}
if (patientFile == null) {
throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId());
}
String path = patientFile.getPath();
List<ExchangePayloadFile> filesTmp = new ArrayList<>();
filesTmp.add(patientFile);
String version = EmisCsvToFhirTransformer.determineVersion(filesTmp);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path);
while (parser.nextRecord()) {
CsvCell deletedCell = parser.getDeleted();
if (deletedCell.getBoolean()) {
continue;
}
//skip patients already done
CsvCell patientGuidCell = parser.getPatientGuid();
String patientGuid = patientGuidCell.getString();
if (patientGuidsDone.contains(patientGuid)) {
continue;
}
patientGuidsDone.add(patientGuid);
//check we've not already converted this patient previously (i.e. re-running this conversion)
CsvCell startDateCell = parser.getDateOfRegistration();
if (startDateCell.isEmpty()) {
LOG.error("Missing start date for patient " + patientGuid + " in exchange " + exchange.getId());
startDateCell = CsvCell.factoryDummyWrapper("1900-01-01");
}
//save internal ID map
String key = patientGuidCell.getString();
String value = startDateCell.getString();
internalIdDal.save(serviceId, "Emis_Latest_Reg_Date", key, value);
}
parser.close();
}
LOG.info("Finished Fixing Emis Episodes (2) for " + odsCode);
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void fixEmisEpisodes1(String odsCode) {
LOG.info("Fixing Emis Episodes (1) for " + odsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(odsCode);
LOG.info("Service " + service.getId() + " -> " + service.getName());
List<UUID> systemIds = findSystemIds(service);
if (systemIds.size() != 1) {
throw new Exception("Found " + systemIds.size() + " for service");
}
UUID systemId = systemIds.get(0);
UUID serviceId = service.getId();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
LOG.info("Found " + exchanges.size() + " exchanges");
InternalIdDalI internalIdDal = DalProvider.factoryInternalIdDal();
Set<String> patientGuidsDone = new HashSet<>();
//exchanges are in REVERSE order (most recent first)
for (Exchange exchange: exchanges) {
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
//skip exchanges that are for custom extracts
if (files.size() <= 1) {
continue;
}
//skip if we're ignoring old data
boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files);
if (!processPatientData) {
continue;
}
//find patient file
ExchangePayloadFile patientFile = null;
for (ExchangePayloadFile file: files) {
if (file.getType().equals("Admin_Patient")) {
patientFile = file;
break;
}
}
if (patientFile == null) {
throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId());
}
String path = patientFile.getPath();
List<ExchangePayloadFile> filesTmp = new ArrayList<>();
filesTmp.add(patientFile);
String version = EmisCsvToFhirTransformer.determineVersion(filesTmp);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path);
while (parser.nextRecord()) {
CsvCell deletedCell = parser.getDeleted();
if (deletedCell.getBoolean()) {
continue;
}
//skip patients already done
CsvCell patientGuidCell = parser.getPatientGuid();
String patientGuid = patientGuidCell.getString();
if (patientGuidsDone.contains(patientGuid)) {
continue;
}
patientGuidsDone.add(patientGuid);
//check we've not already converted this patient previously (i.e. re-running this conversion)
String key = patientGuidCell.getString();
String existingIdMapValue = internalIdDal.getDestinationId(serviceId, "Emis_Latest_Reg_Date", key);
if (existingIdMapValue != null) {
continue;
}
CsvCell startDateCell = parser.getDateOfRegistration();
if (startDateCell.isEmpty()) {
LOG.error("Missing start date for patient " + patientGuid + " in exchange " + exchange.getId());
startDateCell = CsvCell.factoryDummyWrapper("1900-01-01");
}
//find the existing UUID we've previously allocated
String oldSourceId = patientGuid;
UUID episodeUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.EpisodeOfCare, oldSourceId);
if (episodeUuid == null) {
LOG.error("Null episode UUID for old source ID " + oldSourceId + " in exchange " + exchange.getId());
continue;
}
//save ID reference mapping
String newSourceId = patientGuid + ":" + startDateCell.getString();
UUID newEpisodeUuid = IdHelper.getOrCreateEdsResourceId(serviceId, ResourceType.EpisodeOfCare, newSourceId, episodeUuid);
if (!newEpisodeUuid.equals(episodeUuid)) {
throw new Exception("Failed to carry over UUID for episode. Old UUID was " + episodeUuid + " new UUID is " + newEpisodeUuid + " in exchange " + exchange.getId());
}
//save internal ID map
String value = startDateCell.getString();
internalIdDal.save(serviceId, "Emis_Latest_Reg_Date", key, value);
}
parser.close();
}
LOG.info("Finished Fixing Emis Episodes (1) for " + odsCode);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * Connectivity test for RabbitMQ: publishes five small persistent messages to the given
 * exchange, then consumes from the given queue for 30 seconds before closing down.
 * All failures are logged rather than propagated.
 *
 * @param nodes        comma-separated Rabbit node list
 * @param username     Rabbit username
 * @param password     Rabbit password
 * @param sslProtocol  SSL protocol name, or null for plain connections
 * @param exchangeName exchange to publish the test messages to
 * @param queueName    queue to consume the test messages from
 */
private static void testRabbit(String nodes, String username, String password, String sslProtocol, String exchangeName, String queueName) {
    LOG.info("Testing RabbitMQ Connectivity on " + nodes);
    LOG.info("SSL Protocol = " + sslProtocol);
    LOG.info("Exchange = " + exchangeName);
    LOG.info("Queue = " + queueName);

    try {
        //test publishing
        LOG.info("Testing publishing...");
        com.rabbitmq.client.Connection publishConnection = org.endeavourhealth.core.queueing.ConnectionManager.getConnection(username, password, nodes, sslProtocol);
        Channel publishChannel = org.endeavourhealth.core.queueing.ConnectionManager.getPublishChannel(publishConnection, exchangeName);
        publishChannel.confirmSelect();

        for (int i = 0; i < 5; i++) {
            Map<String, Object> headers = new HashMap<>();
            headers.put("HeaderIndex", "" + i);

            AMQP.BasicProperties properties = new AMQP.BasicProperties()
                    .builder()
                    .deliveryMode(2) // Persistent message
                    .headers(headers)
                    .build();

            String body = "MessageIndex = " + i;
            byte[] bytes = body.getBytes();

            publishChannel.basicPublish(
                    exchangeName,
                    "All", //routing key
                    properties,
                    bytes);
        }

        publishChannel.close();
        publishConnection.close();
        LOG.info("...Finished testing publishing");

        //test consuming
        LOG.info("Testing reading...");
        com.rabbitmq.client.Connection readConnection = org.endeavourhealth.core.queueing.ConnectionManager.getConnection(username, password, nodes, sslProtocol);
        Channel readChannel = readConnection.createChannel();
        readChannel.basicQos(1);

        Consumer consumer = new TestRabbitConsumer(readChannel);
        readChannel.basicConsume(queueName, false, "TestRabbitConsumer", false, true, null, consumer);
        LOG.info("Reader Connected (ctrl+c to close) will quit in 30s");
        Thread.sleep(30 * 1000);

        //close the reader too, rather than leaking the connection when the
        //test completes normally (the original never closed these)
        readChannel.close();
        readConnection.close();

        LOG.info("Finished Testing RabbitMQ Connectivity");

    } catch (Throwable t) {
        LOG.error("", t);
    }
}
private static void populateLastDataDate(int threads, int batchSize) {
LOG.debug("Populating last data date");
try {
int processed = 0;
AtomicInteger fixed = new AtomicInteger();
ThreadPool threadPool = new ThreadPool(threads, batchSize);
while (true) {
String sql = "SELECT id FROM drewtest.exchange_ids WHERE done = 0 LIMIT " + batchSize;
//LOG.debug("Getting new batch using: " + sql);
EntityManager auditEntityManager = ConnectionManager.getAuditEntityManager();
SessionImpl auditSession = (SessionImpl)auditEntityManager.getDelegate();
Connection auditConnection = auditSession.connection();
Statement statement = auditConnection.createStatement();
ResultSet rs = statement.executeQuery(sql);
List<UUID> exchangeIds = new ArrayList<>();
while (rs.next()) {
String s = rs.getString(1);
//LOG.debug("Got back exchange ID " + s);
exchangeIds.add(UUID.fromString(s));
}
rs.close();
statement.close();
auditEntityManager.close();
for (UUID exchangeId: exchangeIds) {
threadPool.submit(new PopulateDataDateCallable(exchangeId, fixed));
}
List<ThreadPoolError> errs = threadPool.waitUntilEmpty();
if (!errs.isEmpty()) {
LOG.debug("Got " + errs.size() + " errors");
for (ThreadPoolError err: errs) {
LOG.error("", err.getException());
}
break;
}
processed += exchangeIds.size();
LOG.debug("processed " + processed + " fixed " + fixed.get());
//if finished
if (exchangeIds.size() < batchSize) {
break;
}
}
threadPool.waitAndStop();
LOG.debug("Finished Populating last data date");
} catch (Throwable t) {
LOG.error("", t);
}
}
    /**
     * One-off data fix: replays the Emis Slot transform for every exchange of the given
     * service, creating Slot/Appointment FHIR resources for any booked slot that was never
     * previously transformed, then writes an audit CSV and posts the resulting batches to
     * the Rabbit EdsProtocol exchange so they flow onwards through the pipeline.
     *
     * NOTE(review): the inline comment below says the exchange list is most-recent-first and
     * should be iterated backwards, but the loop iterates in the returned (forward) order -
     * confirm whether ordering matters for this fix.
     *
     * @param serviceOdsCode ODS code identifying the service to fix
     */
    private static void fixEmisMissingSlots(String serviceOdsCode) {
        LOG.debug("Fixing Emis Missing Slots for " + serviceOdsCode);
        try {
            ServiceDalI serviceDal = DalProvider.factoryServiceDal();
            Service service = serviceDal.getByLocalIdentifier(serviceOdsCode);
            LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId());

            //this fix only supports services with exactly one publishing system
            List<UUID> systemIds = findSystemIds(service);
            if (systemIds.size() != 1) {
                throw new Exception("Found " + systemIds.size() + " for service");
            }
            UUID systemId = systemIds.get(0);
            UUID serviceId = service.getId();

            ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
            List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);

            //slot unique IDs (patientGuid:slotGuid) already transformed, so later exchanges skip them
            Set<String> hsSlotsToSkip = new HashSet<>();

            ResourceDalI resourceDal = DalProvider.factoryResourceDal();

            //audit CSV of exchange ID -> batch ID pairs created by this run
            File auditFile = new File("SlotAudit_" + serviceOdsCode + ".csv");
            LOG.debug("Auditing to " + auditFile);

            PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
            if (exchangeConfig == null) {
                throw new Exception("Failed to find PostMessageToExchange config details for exchange EdsProtocol");
            }

            //the list of exchanges is most-recent-first, so iterate backwards to do them in order
            for (Exchange exchange : exchanges) {
                List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());

                //skip exchanges that are for custom extracts
                if (files.size() <= 1) {
                    continue;
                }

                boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files);
                if (!processPatientData) {
                    continue;
                }

                //record a transform audit for this replay, mirroring what a normal transform does
                ExchangeTransformAudit transformAudit = new ExchangeTransformAudit();
                transformAudit.setServiceId(serviceId);
                transformAudit.setSystemId(systemId);
                transformAudit.setExchangeId(exchange.getId());
                transformAudit.setId(UUID.randomUUID());
                transformAudit.setStarted(new Date());

                String version = EmisCsvToFhirTransformer.determineVersion(files);

                EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), null, processPatientData, null);

                //the processor is responsible for saving FHIR resources
                TransformError transformError = new TransformError();
                List<UUID> batchIdsCreated = new ArrayList<>();
                FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(exchange.getId(), serviceId, systemId, transformError, batchIdsCreated);

                Map<Class, AbstractCsvParser> parsers = new HashMap<>();
                EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchange.getId(), files, version, parsers);

                try {
                    //cache the practitioners for each session
                    SessionUserTransformer.transform(parsers, fhirResourceFiler, csvHelper);

                    Slot parser = (Slot) parsers.get(Slot.class);
                    while (parser.nextRecord()) {

                        //should this record be transformed?
                        //the slots CSV contains data on empty slots too; ignore them
                        CsvCell patientGuid = parser.getPatientGuid();
                        if (patientGuid.isEmpty()) {
                            continue;
                        }

                        //the EMIS data contains thousands of appointments that refer to patients we don't have, so I'm explicitly
                        //handling this here, and ignoring any Slot record that is in this state
                        UUID patientEdsId = IdHelper.getEdsResourceId(fhirResourceFiler.getServiceId(), ResourceType.Patient, patientGuid.getString());
                        if (patientEdsId == null) {
                            continue;
                        }

                        //see if this appointment has previously been transformed
                        CsvCell slotGuid = parser.getSlotGuid();
                        String uniqueId = patientGuid.getString() + ":" + slotGuid.getString();
                        if (!hsSlotsToSkip.contains(uniqueId)) {

                            //transform this slot record if no appt already exists for it
                            boolean alreadyExists = false;
                            UUID discoveryId = IdHelper.getEdsResourceId(serviceId, ResourceType.Slot, uniqueId);
                            if (discoveryId != null) {
                                //an ID mapping alone doesn't prove the resource was saved - check for actual history
                                List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Slot.toString(), discoveryId);
                                if (!history.isEmpty()) {
                                    alreadyExists = true;
                                }
                            }

                            if (alreadyExists) {
                                hsSlotsToSkip.add(uniqueId);
                            }
                        }

                        if (hsSlotsToSkip.contains(uniqueId)) {
                            continue;
                        }

                        //add to the skip set so a duplicate record in a later exchange isn't transformed twice
                        hsSlotsToSkip.add(uniqueId);

                        try {
                            LOG.debug("Creating slot for " + uniqueId);
                            SlotTransformer.createSlotAndAppointment((Slot) parser, fhirResourceFiler, csvHelper);

                        } catch (Exception ex) {
                            //record-level errors are logged against the filer rather than aborting the exchange
                            fhirResourceFiler.logTransformRecordError(ex, parser.getCurrentState());
                        }
                    }

                    csvHelper.clearCachedSessionPractitioners();

                    fhirResourceFiler.failIfAnyErrors();
                    fhirResourceFiler.waitToFinish();

                } catch (Throwable ex) {
                    //fatal error for this exchange - capture it in the transform error XML
                    Map<String, String> args = new HashMap<>();
                    args.put(TransformErrorUtility.ARG_FATAL_ERROR, ex.getMessage());
                    TransformErrorUtility.addTransformError(transformError, ex, args);
                    LOG.error("", ex);
                }

                transformAudit.setEnded(new Date());
                transformAudit.setNumberBatchesCreated(new Integer(batchIdsCreated.size()));
                if (transformError.getError().size() > 0) {
                    transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError));
                }

                //save our audit if something went wrong or was saved
                if (transformError.getError().size() > 0
                        || !batchIdsCreated.isEmpty()) {
                    exchangeDal.save(transformAudit);
                }

                //send to Rabbit protocol queue
                if (!batchIdsCreated.isEmpty()) {

                    //write batch ID to file, so we have an audit of what we created
                    List<String> lines = new ArrayList<>();
                    for (UUID batchId : batchIdsCreated) {
                        lines.add("\"" + exchange.getId() + "\",\"" + batchId + "\"");
                    }
                    Files.write(auditFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE);

                    String batchesJson = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray());
                    exchange.setHeader(HeaderKeys.BatchIdsJson, batchesJson);

                    //send to Rabbit
                    PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
                    component.process(exchange);
                }

                //abort the whole fix if this exchange had transform errors
                if (transformError.getError().size() > 0) {
                    throw new Exception("Dropping out due to error in transform");
                }
            }

            LOG.debug("Finished Fixing Emis Missing Slots for " + serviceOdsCode);
        } catch (Throwable t) {
            LOG.error("", t);
        }
    }
private static void findBartsPersonIds(String sourceFile, UUID serviceUuid, UUID systemUuid, String dateCutoffStr, String destFile) {
LOG.debug("Finding Barts person IDs for " + sourceFile);
try {
//read NHS numbers into memory
Set<String> hsNhsNumbers = new HashSet<>();
List<String> listNhsNumbers = new ArrayList<>();
File src = new File(sourceFile);
List<String> lines = Files.readAllLines(src.toPath());
for (String line : lines) {
String s = line.trim();
hsNhsNumbers.add(s);
listNhsNumbers.add(s); //maintain a list so we can preserve the ordering
}
LOG.debug("Looking for Person IDs for " + hsNhsNumbers.size() + " nhs numbers or any since " + dateCutoffStr);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date dateCutoff = sdf.parse(dateCutoffStr);
Map<String, Set<String>> hmMatches = new HashMap<>();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);
for (Exchange exchange : exchanges) {
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());
for (ExchangePayloadFile file : files) {
String parentPath = new File(file.getPath()).getParent();
String parentDir = FilenameUtils.getBaseName(parentPath);
Date extractDate = sdf.parse(parentDir);
boolean inDateRange = !extractDate.before(dateCutoff);
String type = file.getType();
if (type.equals("PPATI")) {
PPATI parser = new PPATI(null, null, null, null, file.getPath());
while (parser.nextRecord()) {
CsvCell nhsNumberCell = parser.getNhsNumber();
String nhsNumber = nhsNumberCell.getString();
nhsNumber = nhsNumber.replace("-", "");
if (hsNhsNumbers.contains(nhsNumber)
|| inDateRange) {
CsvCell personIdCell = parser.getMillenniumPersonId();
String personId = personIdCell.getString();
Set<String> s = hmMatches.get(nhsNumber);
if (s == null) {
s = new HashSet<>();
hmMatches.put(nhsNumber, s);
}
s.add(personId);
}
}
parser.close();
} else if (type.equals("PPALI")) {
PPALI parser = new PPALI(null, null, null, null, file.getPath());
while (parser.nextRecord()) {
CsvCell aliasCell = parser.getAlias();
//not going to bother trying to filter on alias type, since it won't hurt to include
//extra patients, if they have an MRN that accidentally matches one of the NHS numbers being searched for
String alias = aliasCell.getString();
if (hsNhsNumbers.contains(alias)
|| inDateRange) {
//NHS numbers in PPALI don't have the extra hyphens
CsvCell personIdCell = parser.getMillenniumPersonIdentifier();
String personId = personIdCell.getString();
Set<String> s = hmMatches.get(alias);
if (s == null) {
s = new HashSet<>();
hmMatches.put(alias, s);
}
s.add(personId);
}
}
parser.close();
} else {
//just ignore other file types
}
}
}
LOG.debug("" + hmMatches.size() + " / " + hsNhsNumbers.size() + " NHS numbers had person IDs found");
List<String> newLines = new ArrayList<>();
for (String nhsNumber : listNhsNumbers) {
Set<String> personIds = hmMatches.get(nhsNumber);
if (personIds == null) {
LOG.error("Failed to find person ID for " + nhsNumber);
continue;
}
newLines.add("#NHS " + nhsNumber + ":");
for (String personId : personIds) {
newLines.add(personId);
}
}
File dst = new File(destFile);
if (dst.exists()) {
dst.delete();
}
Files.write(dst.toPath(), newLines);
LOG.debug("Finished Finding Barts person IDs for " + sourceFile);
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void createEmisDataTables() {
LOG.debug("Creating Emis data tables");
try {
List<String> fileTypes = new ArrayList<>();
fileTypes.add("Admin_Location");
fileTypes.add("Admin_OrganisationLocation");
fileTypes.add("Admin_Organisation");
fileTypes.add("Admin_Patient");
fileTypes.add("Admin_UserInRole");
fileTypes.add("Agreements_SharingOrganisation");
fileTypes.add("Appointment_SessionUser");
fileTypes.add("Appointment_Session");
fileTypes.add("Appointment_Slot");
fileTypes.add("CareRecord_Consultation");
fileTypes.add("CareRecord_Diary");
fileTypes.add("CareRecord_ObservationReferral");
fileTypes.add("CareRecord_Observation");
fileTypes.add("CareRecord_Problem");
fileTypes.add("Coding_ClinicalCode");
fileTypes.add("Coding_DrugCode");
fileTypes.add("Prescribing_DrugRecord");
fileTypes.add("Prescribing_IssueRecord");
fileTypes.add("Audit_PatientAudit");
fileTypes.add("Audit_RegistrationAudit");
for (String fileType : fileTypes) {
createEmisDataTable(fileType);
}
LOG.debug("Finished Creating Emis data tables");
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void createEmisDataTable(String fileType) throws Exception {
ParserI parser = createParserForEmisFileType(fileType, null);
if (parser == null) {
return;
}
System.out.println("-- " + fileType);
String table = fileType.replace(" ", "_");
String dropSql = "DROP TABLE IF EXISTS `" + table + "`;";
System.out.println(dropSql);
String sql = "CREATE TABLE `" + table + "` (";
sql += "file_name varchar(100)";
sql += ", ";
sql += "extract_date datetime";
if (parser instanceof AbstractFixedParser) {
AbstractFixedParser fixedParser = (AbstractFixedParser) parser;
List<FixedParserField> fields = fixedParser.getFieldList();
for (FixedParserField field : fields) {
String col = field.getName();
int len = field.getFieldlength();
sql += ", ";
sql += col.replace(" ", "_").replace("#", "").replace("/", "");
sql += " varchar(";
sql += len;
sql += ")";
}
} else {
List<String> cols = parser.getColumnHeaders();
for (String col : cols) {
sql += ", ";
sql += col.replace(" ", "_").replace("#", "").replace("/", "");
if (col.equals("BLOB_CONTENTS")
|| col.equals("VALUE_LONG_TXT")
|| col.equals("COMMENT_TXT")
|| col.equals("NONPREG_REL_PROBLM_SCT_CD")) {
sql += " mediumtext";
} else if (col.indexOf("Date") > -1
|| col.indexOf("Time") > -1) {
sql += " varchar(10)";
} else {
sql += " varchar(255)";
}
}
}
sql += ");";
/*LOG.debug("-- fileType");
LOG.debug(sql);*/
System.out.println(sql);
}
    /**
     * Stub for converting legacy FHIR audit data for a service into the new format.
     *
     * NOTE(review): currently UNIMPLEMENTED - the body is only the planned approach in
     * comments; the method logs start/finish but performs no work, and the threads
     * parameter is unused.
     *
     * @param serviceId service whose audit data would be converted
     * @param threads   intended parallelism for the conversion (currently unused)
     */
    private static void convertFhirAudit(UUID serviceId, int threads) {
        LOG.info("Converting FHIR audit for " + serviceId);
        try {
            //planned approach:
            //get all systems
            //for each file in publisher transform
            // need to create new version in audit
            // go through each file and generate the published_file_record entries
            // store map of old audit file ID -> new file ID
            //update transform_warning table, to set new file ID
            //for each audit JSON record
            // deserialise
            // convert to new format
            // what about FHIR ones that point to the wrong DB, like the Emis code map ones?
            // save to new DB
            // delete from old DB
            LOG.info("Finished Converting FHIR audit for " + serviceId);
        } catch (Throwable t) {
            LOG.error("", t);
        }
    }
/*private static void moveS3ToAudit(int threads) {
LOG.info("Moving S3 to Audit");
try {
//list S3 contents
List<FileInfo> files = FileHelper.listFilesInSharedStorageWithInfo("s3://discoveryaudit/audit");
LOG.debug("Found " + files.size() + " audits");
int countPerThread = files.size() / threads;
int pos = 0;
AtomicInteger done = new AtomicInteger();
List<Thread> threadList = new ArrayList<>();
for (int i=0; i<threads; i++) {
List<FileInfo> perThread = new ArrayList<>();
int countThisThread = countPerThread;
if (i+1 == threads) {
countThisThread = files.size() - pos;
}
for (int j=0; j<countThisThread; j++) {
FileInfo fileInfo = files.get(pos);
pos ++;
perThread.add(fileInfo);
}
MoveToS3Runnable r = new MoveToS3Runnable(perThread, done);
Thread t = new Thread(r);
threadList.add(t);
t.start();
}
while (true) {
Thread.sleep(5000);
boolean allDone = true;
for (Thread t: threadList) {
if (t.getState() != Thread.State.TERMINATED) {
//if (!t.isAlive()) {
allDone = false;
break;
}
}
if (allDone) {
break;
}
}
LOG.debug("Finished with " + done.get() + " / " + files.size());
LOG.info("Finished Moving S3 to Audit");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void convertEmisGuids() {
LOG.debug("Converting Emis Guid");
try {
Map<String, String> map = new HashMap<>();
//this list of guids and dates is based off the live Emis extracts, giving the most recent bulk date for each organisation
//only practices where the extract started before the move to AWS and where the extract was disabled and re-bulked need to be in here.
//Practices disabled and re-bulked since the move to AWS are handled differently.
map.put("{DD31E915-7076-46CF-99CD-8378AB588B69}", "20/07/2017");
map.put("{87A8851C-3DA4-4BE0-869C-3BF6BA7C0612}", "15/10/2017");
map.put("{612DCB3A-5BE6-4D50-909B-F0F20565F9FC}", "09/08/2017");
map.put("{15667F8D-46A0-4A87-9FA8-0C56B157A0A9}", "05/05/2017");
map.put("{3CFEFBF9-B856-4A40-A39A-4EB6FA39295E}", "31/01/2017");
map.put("{3F481450-AD19-4793-B1F0-40D5C2C57EF7}", "04/11/2017");
map.put("{83939542-20E4-47C5-9883-BF416294BB22}", "13/10/2017");
map.put("{73AA7E3A-4331-4167-8711-FE07DDBF4657}", "15/10/2017");
map.put("{3B703CCF-C527-4EC8-A802-00D3B1535DD0}", "01/02/2017");
map.put("{ED442CA3-351F-43E4-88A2-2EEACE39A402}", "13/10/2017");
map.put("{86537B5B-7CF3-4964-8906-7C10929FBC20}", "13/05/2017");
map.put("{9A4518C4-82CE-4509-8039-1B5F49F9C1FA}", "12/08/2017");
map.put("{16D7F8F9-4A35-44B1-8F1D-DD0162584684}", "11/07/2017");
map.put("{D392C499-345C-499B-898C-93F2CB8CC1B9}", "15/10/2017");
map.put("{5B87882A-0EE8-4233-93D0-D2F5F4F94040}", "15/03/2017");
map.put("{CFE3B460-9058-47FB-BF1D-6BEC13A2257D}", "19/04/2017");
map.put("{7B03E105-9275-47CC-8022-1469FE2D6AE4}", "20/04/2017");
map.put("{94470227-587C-47D7-A51F-9893512424D8}", "27/04/2017");
map.put("{734F4C99-6326-4CA4-A22C-632F0AC12FFC}", "17/10/2017");
map.put("{03C5B4B4-1A70-45F8-922E-135C826D48E0}", "20/04/2017");
map.put("{1BB17C3F-CE80-4261-AF6C-BE987E3A5772}", "09/05/2017");
map.put("{16F6DD42-2140-4395-95D5-3FA50E252896}", "20/04/2017");
map.put("{3B6FD632-3FFB-48E6-9775-287F6C486752}", "15/10/2017");
map.put("{F987F7BD-E19C-46D2-A446-913489F1BB7A}", "05/02/2017");
map.put("{BE7CC1DC-3CAB-4BB1-A5A2-B0C854C3B78E}", "06/07/2017");
map.put("{303EFA4E-EC8F-4CBC-B629-960E4D799E0D}", "15/10/2017");
map.put("{5EE8FD1F-F23A-4209-A1EE-556F9350C900}", "01/02/2017");
map.put("{04F6C555-A298-45F1-AC5E-AC8EBD2BB720}", "17/10/2017");
map.put("{67383254-F7F1-4847-9AA9-C7DCF32859B8}", "17/10/2017");
map.put("{31272E4E-40E0-4103-ABDC-F40A7B75F278}", "19/10/2017");
map.put("{09CA2E3B-7143-4999-9934-971F3F2E6D8C}", "15/10/2017");
map.put("{0527BCE2-4315-47F2-86A1-2E9F3E50399B}", "15/10/2017");
map.put("{16DD14B5-D1D5-4B0C-B886-59AC4DACDA7A}", "04/07/2017");
map.put("{411D0A79-6913-473C-B486-C01F6430D8A6}", "21/09/2017");
map.put("{0862FADA-594A-415E-B971-7A4312E0A58C}", "10/06/2017");
map.put("{249C3F3C-24F0-44CE-97A9-B535982BD70C}", "15/10/2017");
map.put("{5D7A1915-6E22-4B20-A8AE-4768C06D3BBF}", "28/09/2017"); //Barts community
map.put("{131AE556-8B50-4C17-9D7D-A4B19F7B1FEA}", "15/10/2017"); //Aberfeldy practice F84698
map.put("{C0D2D0DF-EF78-444D-9A6D-B9EDEF5EF350}", "13/10/2017");
map.put("{F174B354-4156-4BCB-960F-35D0145075EA}", "01/02/2017");
map.put("{38600D63-1DE0-4910-8ED6-A38DC28A9DAA}", "19/02/2018"); //THE SPITALFIELDS PRACTICE (CDB 16);F84081
map.put("{B3ECA2DE-D926-4594-B0EA-CF2F28057CE1}", "19/10/2017");
map.put("{18F7C28B-2A54-4F82-924B-38C60631FFFA}", "04/02/2018"); //Rowans Surgery (CDB 18174);H85035
map.put("{16FB5EE8-5039-4068-BC42-1DB56DC2A530}", "08/06/2017");
map.put("{4BA4A5AC-7B25-40B2-B0EA-135702A72F9D}", "15/10/2017");
map.put("{01B8341F-BC8F-450E-8AFA-4CDA344A5009}", "15/10/2017");
map.put("{E6FBEA1C-BDA2-40B7-A461-C262103F08D7}", "08/06/2017");
map.put("{141C68EB-1BC8-4E99-A9D9-0E63A8944CA9}", "15/10/2017");
map.put("{A3EA804D-E7EB-43EE-8F1F-E860F6337FF7}", "15/10/2017");
map.put("{771B42CC-9C0C-46E2-8143-76F04AF91AD5}", "13/11/2017"); //cranwich road
map.put("{16EA8D5C-C667-4818-B629-5D6F4300FEEF}", "11/05/2017");
map.put("{29E51964-C94D-4CB4-894E-EB18E27DEFC1}", "15/10/2017");
map.put("{3646CCA5-7FE4-4DFE-87CD-DA3CE1BA885D}", "27/09/2017");
map.put("{3EC82820-702F-4218-853B-D3E5053646A8}", "05/05/2017");
map.put("{37F3E676-B203-4329-97F8-2AF5BFEAEE5A}", "19/10/2017");
map.put("{A0E3208B-95E9-4284-9B5A-D4D387CCC9F9}", "07/06/2017");
map.put("{0BEAF1F0-9507-4AC2-8997-EC0BA1D0247E}", "19/10/2017");
map.put("{071A50E7-1764-4210-94EF-6A4BF96CF753}", "21/02/2017");
map.put("{0C1983D8-FB7D-4563-84D0-1F8F6933E786}", "20/07/2017");
map.put("{871FEEB2-CE30-4603-B9A3-6FA6CC47B5D4}", "15/10/2017");
map.put("{42906EBE-8628-486D-A52F-27B935C9937A}", "01/02/2017");
map.put("{1AB7ABF3-2572-4D07-B719-CFB2FE3AAC80}", "15/10/2017");
map.put("{E312A5B7-13E7-4E43-BE35-ED29F6216D3C}", "20/04/2017");
map.put("{55E60891-8827-40CD-8011-B0223D5C8970}", "15/10/2017");
map.put("{03A63F52-7FEE-4592-9B54-83CEBCF67B5D}", "26/04/2017");
map.put("{DB39B649-B48D-4AC2-BAB1-AC807AABFAC4}", "15/10/2017");
map.put("{0AF9B2AF-A0FB-40B0-BA05-743BA6845DB1}", "26/08/2017");
map.put("{A7600092-319C-4213-92C2-738BEEFC1609}", "31/01/2017");
map.put("{5A1AABA9-7E96-41E7-AF18-E02F4CF1DFB6}", "15/10/2017");
map.put("{7D8CE31D-66AA-4D6A-9EFD-313646BD1D73}", "15/10/2017");
map.put("{03EA4A79-B6F1-4524-9D15-992B47BCEC9A}", "15/10/2017");
map.put("{4588C493-2EA3-429A-8428-E610AE6A6D76}", "28/09/2017"); //Barts community
map.put("{B13F3CC9-C317-4E0D-9C57-C545E4A53CAF}", "15/10/2017");
map.put("{463DA820-6EC4-48CB-B915-81B31AFBD121}", "13/10/2017");
map.put("{16F0D65C-B2A8-4186-B4E7-BBAF4390EC55}", "13/10/2017");
map.put("{0039EF15-2DCF-4F70-B371-014C807210FD}", "24/05/2017");
map.put("{E132BF05-78D9-4E4B-B875-53237E76A0FA}", "19/10/2017");
map.put("{3DFC2DA6-AD8C-4836-945D-A6F8DB22AA49}", "15/10/2017");
map.put("{BCB43B1D-2857-4186-918B-460620F98F81}", "13/10/2017");
map.put("{E134C74E-FA3E-4E14-A4BB-314EA3D3AC16}", "15/10/2017");
map.put("{C0F40044-C2CA-4D1D-95D3-553B29992385}", "26/08/2017");
map.put("{B174A018-538D-4065-838C-023A245B53DA}", "14/02/2017");
map.put("{43380A69-AE7D-4ED7-B014-0708675D0C02}", "08/06/2017");
map.put("{E503F0E0-FE56-4CEF-BAB5-0D25B834D9BD}", "13/10/2017");
map.put("{08946F29-1A53-4AF2-814B-0B8758112F21}", "07/02/2018"); //NEWHAM MEDICAL CENTRE (CDB 3461);F84669
map.put("{09857684-535C-4ED6-8007-F91F366611C6}", "19/10/2017");
map.put("{C409A597-009A-4E11-B828-A595755DE0EA}", "17/10/2017");
map.put("{58945A1C-2628-4595-8F8C-F75D93045949}", "15/10/2017");
map.put("{16FF2874-20B0-4188-B1AF-69C97055AA60}", "17/10/2017");
map.put("{2C91E9DA-3F92-464E-B6E6-61D3DE52E62F}", "15/10/2017");
map.put("{16E7AD27-2AD9-43C0-A473-1F39DF93E981}", "10/06/2017");
map.put("{A528478D-65DB-435C-9E98-F8BDB49C9279}", "20/04/2017");
map.put("{A2BDB192-E79C-44C5-97A2-1FD4517C456F}", "21/08/2017");
map.put("{73DFF193-E917-4DBC-B5CF-DD2797B29377}", "15/10/2017");
map.put("{62825316-9107-4E2C-A22C-86211B4760DA}", "13/10/2017");
map.put("{006E8A30-2A45-4DBE-91D7-1C53FADF38B1}", "28/01/2018"); //The Lawson Practice (CDB 4334);F84096
map.put("{E32AA6A6-46B1-4198-AA13-058038AB8746}", "13/10/2017");
map.put("{B51160F1-79E3-4BA7-AA3D-1112AB341146}", "30/09/2017");
map.put("{234503E5-56B4-45A0-99DA-39854FBE78E9}", "01/02/2017");
map.put("{7D1852DA-E264-4599-B9B4-8F40207F967D}", "09/10/2017");
map.put("{44716213-7FEE-4247-A09E-7285BD6B69C6}", "13/10/2017");
map.put("{19BCC870-2704-4D21-BA7B-56F2F472AF35}", "15/10/2017");
map.put("{FEF842DA-FD7C-480F-945A-D097910A81EB}", "13/10/2017");
map.put("{1C980E19-4A39-4ACD-BA8A-925D3E525765}", "13/10/2017");
map.put("{AABDDC3A-93A4-4A87-9506-AAF52E74012B}", "07/02/2018"); //DR N DRIVER AND PARTNERS (CDB 4419);F84086
map.put("{90C2959C-0C2D-43DC-A81B-4AD594C17999}", "20/04/2017");
map.put("{1F1669CF-1BB0-47A7-8FBF-BE65651644C1}", "15/10/2017");
map.put("{C1800BE8-4C1D-4340-B0F2-7ED208586ED3}", "15/10/2017");
map.put("{55A94703-4582-46FB-808A-1990E9CBCB6F}", "19/02/2018"); //Stamford Hill Group Practice (CDB 56);F84013
map.put("{D4996E62-268F-4759-83A6-7A68D0B38CEC}", "27/04/2017");
map.put("{3C843BBA-C507-4A95-9934-1A85B977C7B8}", "01/02/2017");
map.put("{2216253B-705D-4C46-ADB3-ED48493D6A39}", "03/02/2018"); //RIVERSIDE MEDICAL PRACTICE (CDB 14675);Y01962
map.put("{00123F97-4557-44AD-81B5-D9902DD72EE9}", "28/04/2017");
map.put("{E35D4D12-E7D2-484B-BFF6-4653B3FED228}", "15/10/2017");
map.put("{6D8B4D28-838B-4915-A148-6FEC2CEBCE77}", "05/07/2017");
map.put("{188D5B4D-4BF6-46E3-AF11-3AD32C68D251}", "19/10/2017");
map.put("{16F7DDE1-3763-4D3A-A58D-F12F967718CF}", "02/11/2017");
map.put("{03148933-6E1C-4A8A-A6D2-A3D488E14DDD}", "30/12/2017");
map.put("{16DE1A3C-875B-4AB2-B227-8A42604E029C}", "05/11/2017");
map.put("{D628D1BC-D02E-4101-B8CD-5B3DB2D06FC1}", "05/05/2017");
map.put("{1EA6259A-6A49-46DB-991D-D604675F87E2}", "15/10/2017");
map.put("{817F9B46-AEE0-45D5-95E3-989F75C4844E}", "20/04/2017");
map.put("{1C422471-F52A-4C30-8D23-140BEB7AAEFC}", "15/08/2017");
map.put("{A6467E73-0F15-49D6-AFAB-4DFB487E7963}", "10/05/2017");
map.put("{CC7D1781-1B85-4AD6-A5DD-9AD5E092E8DB}", "13/10/2017");
map.put("{167CD5C8-148F-4D78-8997-3B22EC0AF6B6}", "13/10/2017");
map.put("{9DD5D2CE-2585-49D8-AF04-2CB1BD137594}", "15/10/2017");
map.put("{D6696BB5-DE69-49D1-BC5E-C56799E42640}", "07/02/2018"); //BOLEYN MEDICAL CENTRE (CDB 4841);F84050
map.put("{169375A9-C3AB-4C5E-82B0-DFF7656AD1FA}", "20/04/2017");
map.put("{0A8ECFDE-95EE-4811-BC05-668D49F5C799}", "19/11/2017");
map.put("{79C898A1-BB92-48F9-B0C3-6725370132B5}", "20/10/2017");
map.put("{472AC9BA-AFFE-4E81-81CA-40DD8389784D}", "27/04/2017");
map.put("{00121CB7-76A6-4D57-8260-E9CA62FFCD77}", "13/10/2017");
map.put("{0FCBA0A7-7CAB-4E75-AC81-5041CD869CA1}", "15/10/2017");
map.put("{00A9C32D-2BB2-4A20-842A-381B3F2031C0}", "19/10/2017");
map.put("{26597C5A-3E29-4960-BE11-AC75D0430615}", "03/05/2017");
map.put("{D945FEF7-F5EF-422B-AB35-6937F9792B54}", "15/10/2017");
map.put("{16D685C6-130A-4B19-BCA9-90AC7DC16346}", "08/07/2017");
map.put("{F09E9CEF-2615-4C9D-AA3D-79E0AB10D0B3}", "13/10/2017");
map.put("{CD7EF748-DB88-49CF-AA6E-24F65029391F}", "15/10/2017");
map.put("{B22018CF-2B52-4A1A-9F6A-CEA13276DB2E}", "19/10/2017");
map.put("{0DF8CFC7-5DE6-4DDB-846A-7F28A2740A00}", "02/12/2017");
map.put("{50F439E5-DB18-43A0-9F25-825957013A07}", "11/01/2018"); //DR PI ABIOLA (CDB 5681);F84631
map.put("{00A3BA25-21C6-42DE-82AA-55FF0D85A6C3}", "31/10/2018"); //MARKET STREET HEALTH GROUP (CDB 381);F84004
map.put("{77B59D29-0FD9-4737-964F-5DBA49D94AB6}", "31/10/2018"); //Star Lane Medical Centre (CDB 40);F84017
map.put("{91239362-A105-4DEA-8E8E-239C3BCEDFD2}", "11/01/2018"); //BEECHWOOD MEDICAL CENTRE (CDB 5661);F84038
map.put("{53A113F5-6E3B-410F-A473-53E38A79335B}", "01/06/2018"); //ELFT Community RWKGY CDB 25362
map.put("{164BE8EC-E2D5-40DE-A5FC-25E058A5C47E}", "17/10/2018"); //Haiderian Medical Centre F82002
map.put("{164CE1B0-F7B3-44AF-B1E4-3DA6C64DEA4C}", "26/11/2018"); //THE GREEN WOOD PRACTICE F82007
map.put("{A30A4BB7-B17B-11D9-AD5F-00D0B77FCBFC}", "26/11/2018"); //Tulasi Medical Practice F82660
LOG.debug("Starting with map size " + map.size());
Map<String, String> hmGuidToOdsMap = new HashMap<>();
UUID systemId = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
UUID serviceId = service.getId();
String ods = service.getLocalId();
String orgGuid = null;
List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceId, systemId, 5);
for (Exchange exchange: exchanges) {
String exchangeBody = exchange.getBody();
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
if (!files.isEmpty()) {
ExchangePayloadFile first = files.get(0);
String path = first.getPath();
if (path.indexOf("EMIS_CUSTOM") > -1) {
continue;
}
File f = new File(path);
f = f.getParentFile(); //org GUID
orgGuid = f.getName();
break;
}
}
if (orgGuid == null) {
LOG.error("Failed to find OrgGuid for " + service.getName() + " " + ods);
} else {
hmGuidToOdsMap.put(orgGuid, ods);
}
}
//create new code
for (String orgGuid: map.keySet()) {
String dateStr = map.get(orgGuid);
String odsCode = hmGuidToOdsMap.get(orgGuid);
if (Strings.isNullOrEmpty(odsCode)) {
LOG.error("Missing ODS code for " + orgGuid);
} else {
System.out.println("map.put(\"" + odsCode + "\", \"" + dateStr + "\");");
}
}
LOG.debug("Finished Converting Emis Guid");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void testS3VsMySql(UUID serviceUuid, int count, int sqlBatchSize, String bucketName) {
LOG.debug("Testing S3 vs MySQL for service " + serviceUuid);
try {
//retrieve some audit JSON from the DB
EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection connection = session.connection();
String sql = "select resource_id, resource_type, version, mappings_json"
+ " from resource_field_mappings"
+ " where mappings_json != '[]'";
if (count > -1) {
sql += "limit " + count + ";";
}
Statement statement = connection.createStatement();
statement.setFetchSize(1000);
ResultSet rs = statement.executeQuery(sql);
List<ResourceFieldMapping> list = new ArrayList<>();
while (rs.next()) {
int col = 1;
String resourceId = rs.getString(col++);
String resourceType = rs.getString(col++);
String version = rs.getString(col++);
String json = rs.getString(col++);
ResourceFieldMapping obj = new ResourceFieldMapping();
obj.setResourceId(UUID.fromString(resourceId));
obj.setResourceType(resourceType);
obj.setVersion(UUID.fromString(version));
obj.setResourceField(json);
list.add(obj);
}
rs.close();
statement.close();
entityManager.close();
int done = 0;
//test writing to S3
long s3Start = System.currentTimeMillis();
LOG.debug("Doing S3 test");
for (int i=0; i<list.size(); i++) {
ResourceFieldMapping mapping = list.get(i);
String entryName = mapping.getVersion().toString() + ".json";
String keyName = "auditTest/" + serviceUuid + "/" + mapping.getResourceType() + "/" + mapping.getResourceId() + "/" + mapping.getVersion() + ".zip";
String jsonStr = mapping.getResourceField();
//may as well zip the data, since it will compress well
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(baos);
zos.putNextEntry(new ZipEntry(entryName));
zos.write(jsonStr.getBytes());
zos.flush();
zos.close();
byte[] bytes = baos.toByteArray();
ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
//ProfileCredentialsProvider credentialsProvider = new ProfileCredentialsProvider();
DefaultAWSCredentialsProviderChain credentialsProvider = DefaultAWSCredentialsProviderChain.getInstance();
AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder
.standard()
.withCredentials(credentialsProvider)
.withRegion(Regions.EU_WEST_2);
AmazonS3 s3Client = clientBuilder.build();
ObjectMetadata objectMetadata = new ObjectMetadata();
objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION);
objectMetadata.setContentLength(bytes.length);
PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, byteArrayInputStream, objectMetadata);
s3Client.putObject(putRequest);
done ++;
if (done % 1000 == 0) {
LOG.debug("Done " + done + " / " + list.size());
}
}
long s3End = System.currentTimeMillis();
LOG.debug("S3 took " + (s3End - s3Start) + " ms");
//test inserting into a DB
long sqlStart = System.currentTimeMillis();
LOG.debug("Doing SQL test");
sql = "insert into drewtest.json_speed_test (resource_id, resource_type, created_at, version, mappings_json) values (?, ?, ?, ?, ?)";
entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
session = (SessionImpl) entityManager.getDelegate();
connection = session.connection();
PreparedStatement ps = connection.prepareStatement(sql);
entityManager.getTransaction().begin();
done = 0;
int currentBatchSize = 0;
for (int i=0; i<list.size(); i++) {
ResourceFieldMapping mapping = list.get(i);
int col = 1;
ps.setString(col++, mapping.getResourceId().toString());
ps.setString(col++, mapping.getResourceType());
ps.setDate(col++, new java.sql.Date(System.currentTimeMillis()));
ps.setString(col++, mapping.getVersion().toString());
ps.setString(col++, mapping.getResourceField());
ps.addBatch();
currentBatchSize ++;
if (currentBatchSize >= sqlBatchSize
|| i+1 == list.size()) {
ps.executeBatch();
entityManager.getTransaction().commit();
//mirror what would happen normally
ps.close();
entityManager.close();
if (i+1 < list.size()) {
entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid);
session = (SessionImpl) entityManager.getDelegate();
connection = session.connection();
ps = connection.prepareStatement(sql);
entityManager.getTransaction().begin();
}
}
done ++;
if (done % 1000 == 0) {
LOG.debug("Done " + done + " / " + list.size());
}
}
long sqlEnd = System.currentTimeMillis();
LOG.debug("SQL took " + (sqlEnd - sqlStart) + " ms");
LOG.debug("Finished Testing S3 vs MySQL for service " + serviceUuid);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
private static void loadEmisData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String onlyThisFileType) {
LOG.debug("Loading Emis data from into " + dbUrl);
try {
//hash file type of every file
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword);
SimpleDateFormat sdfStart = new SimpleDateFormat("yyyy-MM-dd");
Date startDate = sdfStart.parse("2000-01-01");
for (int i = exchanges.size() - 1; i >= 0; i
Exchange exchange = exchanges.get(i);
String exchangeBody = exchange.getBody();
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
if (files.isEmpty()) {
continue;
}
for (ExchangePayloadFile file : files) {
String type = file.getType();
String path = file.getPath();
//if only doing a specific file type, skip all others
if (onlyThisFileType != null
&& !type.equals(onlyThisFileType)) {
continue;
}
String name = FilenameUtils.getBaseName(path);
String[] toks = name.split("_");
if (toks.length != 5) {
throw new TransformException("Failed to find extract date in filename " + path);
}
String dateStr = toks[3];
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
Date extractDate = sdf.parse(dateStr);
boolean processFile = false;
if (type.equalsIgnoreCase("OriginalTerms")
|| type.equalsIgnoreCase("RegistrationStatus")) {
//can't process these custom files in this routine
} else if (type.equalsIgnoreCase("Coding_ClinicalCode")
|| type.equalsIgnoreCase("Coding_DrugCode")) {
processFile = true;
} else {
if (!extractDate.before(startDate)) {
processFile = true;
}
}
if (processFile) {
loadEmisDataFromFile(conn, path, type, extractDate);
}
}
}
conn.close();
LOG.debug("Finished Emis data from into " + dbUrl);
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Instantiates, by reflection, the Emis CSV schema parser for the given file type.
 * File types look like "Domain_Name"; the domain (de-capitalised) is the sub-package
 * and the name is the class, under org.endeavourhealth.transform.emis.csv.schema.
 *
 * @param fileType Emis file type, e.g. "Admin_Patient"
 * @param filePath path to the CSV file the parser should read
 * @return a parser instance, or null if no parser class exists for the type
 */
private static ParserI createParserForEmisFileType(String fileType, String filePath) {
    String[] parts = fileType.split("_");
    String packagePart = parts[0];
    String className = parts[1];
    //package names start lower-case, so de-capitalise the domain portion
    packagePart = packagePart.substring(0, 1).toLowerCase() + packagePart.substring(1);
    try {
        String fullClassName = "org.endeavourhealth.transform.emis.csv.schema." + packagePart + "." + className;
        Class cls = Class.forName(fullClassName);
        //construct via the standard five-argument parser constructor
        Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
        return constructor.newInstance(null, null, null, EmisCsvToFhirTransformer.VERSION_5_4, filePath);
    } catch (Exception ex) {
        LOG.error("No parser for file type [" + fileType + "]");
        LOG.error("", ex);
        return null;
    }
}
/**
 * Loads one Emis CSV file into its staging table (named after the file type),
 * inserting file_name and extract_date plus every CSV column, in batches of five rows.
 *
 * @param conn        open JDBC connection to the staging database
 * @param filePath    path of the CSV file to load
 * @param fileType    Emis file type, used to find the parser and the table name
 * @param extractDate extract date written to every row
 * @throws Exception if parsing or any insert fails (failed batch statements are logged first)
 */
private static void loadEmisDataFromFile(Connection conn, String filePath, String fileType, Date extractDate) throws Exception {
    LOG.debug("Loading " + fileType + ": " + filePath);
    String fileName = FilenameUtils.getName(filePath);
    ParserI parser = createParserForEmisFileType(fileType, filePath);
    if (parser == null) {
        return;
    }
    String table = fileType.replace(" ", "_");
    //check the staging table exists before trying to insert into it
    //(table name comes from the internal file-type constant, not user input)
    String checkSql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1";
    boolean tableExists;
    try (Statement statement = conn.createStatement();
         ResultSet rs = statement.executeQuery(checkSql)) {
        tableExists = rs.next();
    }
    if (!tableExists) {
        LOG.error("No table exists for " + table);
        return;
    }
    //build the insert statement - file_name and extract_date are always the first two columns
    //StringBuilder rather than repeated String concatenation in a loop
    List<String> cols = parser.getColumnHeaders();
    StringBuilder sql = new StringBuilder();
    sql.append("INSERT INTO `").append(table).append("` (");
    sql.append("file_name, extract_date");
    for (String col : cols) {
        sql.append(", ");
        //column names are sanitised the same way the tables were created
        sql.append(col.replace(" ", "_").replace("#", "").replace("/", ""));
    }
    sql.append(") VALUES (");
    sql.append("?, ?");
    for (String col : cols) {
        sql.append(", ?");
    }
    sql.append(")");
    PreparedStatement ps = conn.prepareStatement(sql.toString());
    List<String> currentBatchStrs = new ArrayList<>();
    //load table
    try {
        int done = 0;
        int currentBatchSize = 0;
        while (parser.nextRecord()) {
            int col = 1;
            //file name is always first
            ps.setString(col++, fileName);
            ps.setDate(col++, new java.sql.Date(extractDate.getTime()));
            for (String colName : cols) {
                CsvCell cell = parser.getCell(colName);
                if (cell == null) {
                    ps.setNull(col++, Types.VARCHAR);
                } else {
                    ps.setString(col++, cell.getString());
                }
            }
            ps.addBatch();
            currentBatchSize++;
            currentBatchStrs.add(ps.toString()); //for error handling
            if (currentBatchSize >= 5) {
                ps.executeBatch();
                currentBatchSize = 0;
                currentBatchStrs.clear();
            }
            done++;
            if (done % 5000 == 0) {
                LOG.debug("Done " + done);
            }
        }
        //was ">= 0", which always executed a (possibly empty) final batch; only flush pending rows
        if (currentBatchSize > 0) {
            ps.executeBatch();
        }
    } catch (Throwable t) {
        LOG.error("Failed on batch with statements:");
        for (String currentBatchStr : currentBatchStrs) {
            LOG.error(currentBatchStr);
        }
        throw t;
    } finally {
        //previously only closed on success, leaking the statement on failure
        ps.close();
    }
    LOG.debug("Finished " + fileType + ": " + filePath);
}
/**
 * Prints DROP/CREATE TABLE DDL (via createBartsDataTable) for every known Barts file type.
 */
private static void createBartsDataTables() {
    LOG.debug("Creating Barts data tables");
    try {
        //one entry per Barts file type that we want a staging table for
        String[] fileTypes = new String[]{
                "AEATT",
                "Birth",
                //"BulkDiagnosis",
                //"BulkProblem",
                //"BulkProcedure",
                "CLEVE",
                "CVREF",
                "Diagnosis",
                "ENCINF",
                "ENCNT",
                "FamilyHistory",
                "IPEPI",
                "IPWDS",
                "LOREF",
                "NOMREF",
                "OPATT",
                "ORGREF",
                "PPADD",
                "PPAGP",
                "PPALI",
                "PPATI",
                "PPINF",
                "PPNAM",
                "PPPHO",
                "PPREL",
                "Pregnancy",
                "Problem",
                "PROCE",
                "Procedure",
                "PRSNLREF",
                "SusEmergency",
                "SusInpatient",
                "SusOutpatient",
                //"Tails", TODO - have three separate tails files
                "EventCode",
                "EventSetCanon",
                "EventSet",
                "EventSetExplode",
                "BlobContent",
                "SusInpatientTail",
                "SusOutpatientTail",
                "SusEmergencyTail",
                "AEINV",
                "AETRE",
                "OPREF",
                "STATREF",
                "RTTPE",
                "PPATH",
                "DOCRP",
                "SCHAC",
                "EALEN",
                "DELIV",
                "EALOF",
                "SusEmergencyCareDataSet",
                "SusEmergencyCareDataSetTail"
        };
        for (String fileType : fileTypes) {
            createBartsDataTable(fileType);
        }
        LOG.debug("Finished Creating Barts data tables");
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Prints DROP and CREATE TABLE DDL for one Barts file type, deriving the column list
 * from the file type's schema parser (found by reflection). Fixed-width parsers get
 * columns sized to their declared field lengths; CSV parsers get heuristic sizes.
 *
 * @param fileType Barts file type / schema class name, e.g. "CLEVE"
 * @throws Exception if the parser class exists but cannot be instantiated
 */
private static void createBartsDataTable(String fileType) throws Exception {
    ParserI parser = null;
    try {
        String clsName = "org.endeavourhealth.transform.barts.schema." + fileType;
        //Class<?> rather than a raw type; only ClassNotFoundException is tolerated -
        //any other reflection failure still propagates, as before
        Class<?> cls = Class.forName(clsName);
        //now construct an instance of the parser for the file we've found
        parser = (ParserI) cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class)
                .newInstance(null, null, null, null, null);
    } catch (ClassNotFoundException cnfe) {
        System.out.println("-- No parser for file type [" + fileType + "]");
        return;
    }
    System.out.println("-- " + fileType);
    String table = fileType.replace(" ", "_");
    String dropSql = "DROP TABLE IF EXISTS `" + table + "`;";
    System.out.println(dropSql);
    //build the CREATE statement - StringBuilder rather than repeated String concatenation
    StringBuilder sql = new StringBuilder();
    sql.append("CREATE TABLE `").append(table).append("` (");
    sql.append("file_name varchar(100)");
    if (parser instanceof AbstractFixedParser) {
        //fixed-width files declare explicit field lengths, so size each column to match
        AbstractFixedParser fixedParser = (AbstractFixedParser) parser;
        List<FixedParserField> fields = fixedParser.getFieldList();
        for (FixedParserField field : fields) {
            String col = field.getName();
            int len = field.getFieldlength();
            sql.append(", ");
            sql.append(col.replace(" ", "_").replace("#", "").replace("/", ""));
            sql.append(" varchar(").append(len).append(")");
        }
    } else {
        List<String> cols = parser.getColumnHeaders();
        for (String col : cols) {
            sql.append(", ");
            sql.append(col.replace(" ", "_").replace("#", "").replace("/", ""));
            if (col.equals("BLOB_CONTENTS")
                    || col.equals("VALUE_LONG_TXT")
                    || col.equals("COMMENT_TXT")
                    || col.equals("NONPREG_REL_PROBLM_SCT_CD")) {
                //known large free-text columns
                sql.append(" mediumtext");
            } else if (col.indexOf("Date") > -1
                    || col.indexOf("Time") > -1) {
                sql.append(" varchar(10)");
            } else {
                sql.append(" varchar(255)");
            }
        }
    }
    sql.append(");");
    System.out.println(sql.toString());
}
private static void loadBartsData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String startDateStr, String onlyThisFileType) {
LOG.debug("Loading Barts data from into " + dbUrl);
try {
//hash file type of every file
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword);
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
Date startDate = sdf.parse(startDateStr);
for (int i = exchanges.size() - 1; i >= 0; i
Exchange exchange = exchanges.get(i);
String exchangeBody = exchange.getBody();
List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
if (files.isEmpty()) {
continue;
}
for (ExchangePayloadFile file : files) {
String type = file.getType();
String path = file.getPath();
//if only doing a specific file type, skip all others
if (onlyThisFileType != null
&& !type.equals(onlyThisFileType)) {
continue;
}
boolean processFile = false;
if (type.equalsIgnoreCase("CVREF")
|| type.equalsIgnoreCase("LOREF")
|| type.equalsIgnoreCase("ORGREF")
|| type.equalsIgnoreCase("PRSNLREF")
|| type.equalsIgnoreCase("NOMREF")) {
processFile = true;
} else {
File f = new File(path);
File parentFile = f.getParentFile();
String parentDir = parentFile.getName();
Date extractDate = sdf.parse(parentDir);
if (!extractDate.before(startDate)) {
processFile = true;
}
/*if (!extractDate.before(startDate)
&& !extractDate.after(endDate)) {
processFile = true;
}*/
}
if (processFile) {
loadBartsDataFromFile(conn, path, type);
}
}
}
conn.close();
LOG.debug("Finished Loading Barts data from into " + dbUrl);
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Loads one Barts file into its staging table (named after the file type),
 * inserting file_name plus every column the schema parser declares, in batches of five rows.
 *
 * @param conn     open JDBC connection to the staging database
 * @param filePath path of the file to load
 * @param fileType Barts file type, used to find the parser class and the table name
 * @throws Exception if parsing or any insert fails (failed batch statements are logged first)
 */
private static void loadBartsDataFromFile(Connection conn, String filePath, String fileType) throws Exception {
    LOG.debug("Loading " + fileType + ": " + filePath);
    String fileName = FilenameUtils.getName(filePath);
    //find and instantiate the parser class for this file type by reflection;
    //only ClassNotFoundException is tolerated - other reflection failures propagate, as before
    ParserI parser = null;
    try {
        String clsName = "org.endeavourhealth.transform.barts.schema." + fileType;
        Class<?> cls = Class.forName(clsName);
        //now construct an instance of the parser for the file we've found
        parser = (ParserI) cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class)
                .newInstance(null, null, null, null, filePath);
    } catch (ClassNotFoundException cnfe) {
        LOG.error("No parser for file type [" + fileType + "]");
        return;
    }
    String table = fileType.replace(" ", "_");
    //check the staging table exists before trying to insert into it
    //(table name comes from the internal file-type constant, not user input)
    String checkSql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1";
    boolean tableExists;
    try (Statement statement = conn.createStatement();
         ResultSet rs = statement.executeQuery(checkSql)) {
        tableExists = rs.next();
    }
    if (!tableExists) {
        LOG.error("No table exists for " + table);
        return;
    }
    //build the insert statement - file_name is always the first column
    //StringBuilder rather than repeated String concatenation in a loop
    List<String> cols = parser.getColumnHeaders();
    StringBuilder sql = new StringBuilder();
    sql.append("INSERT INTO `").append(table).append("` (");
    sql.append("file_name");
    for (String col : cols) {
        sql.append(", ");
        //column names are sanitised the same way the tables were created
        sql.append(col.replace(" ", "_").replace("#", "").replace("/", ""));
    }
    sql.append(") VALUES (");
    sql.append("?");
    for (String col : cols) {
        sql.append(", ?");
    }
    sql.append(")");
    PreparedStatement ps = conn.prepareStatement(sql.toString());
    List<String> currentBatchStrs = new ArrayList<>();
    //load table
    try {
        int done = 0;
        int currentBatchSize = 0;
        while (parser.nextRecord()) {
            int col = 1;
            //file name is always first
            ps.setString(col++, fileName);
            for (String colName : cols) {
                CsvCell cell = parser.getCell(colName);
                if (cell == null) {
                    ps.setNull(col++, Types.VARCHAR);
                } else {
                    ps.setString(col++, cell.getString());
                }
            }
            ps.addBatch();
            currentBatchSize++;
            currentBatchStrs.add(ps.toString()); //for error handling
            if (currentBatchSize >= 5) {
                ps.executeBatch();
                currentBatchSize = 0;
                currentBatchStrs.clear();
            }
            done++;
            if (done % 5000 == 0) {
                LOG.debug("Done " + done);
            }
        }
        //was ">= 0", which always executed a (possibly empty) final batch; only flush pending rows
        if (currentBatchSize > 0) {
            ps.executeBatch();
        }
    } catch (Throwable t) {
        LOG.error("Failed on batch with statements:");
        for (String currentBatchStr : currentBatchStrs) {
            LOG.error(currentBatchStr);
        }
        throw t;
    } finally {
        //previously only closed on success, leaking the statement on failure
        ps.close();
    }
    LOG.debug("Finished " + fileType + ": " + filePath);
}
/*private static void fixPseudoIds(String subscriberConfig, int threads) {
LOG.debug("Fixing Pseudo IDs for " + subscriberConfig);
try {
//update psuedo ID on patient table
//update psuedo ID on person table
//update pseudo ID on subscriber_transform mapping table
JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
JsonNode saltNode = config.get("pseudonymisation");
ObjectMapper mapper = new ObjectMapper();
Object json = mapper.readValue(saltNode.toString(), Object.class);
String linkDistributors = mapper.writeValueAsString(json);
LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class);
LinkDistributorConfig[] arr = null;
JsonNode linkDistributorsNode = config.get("linkedDistributors");
if (linkDistributorsNode != null) {
json = mapper.readValue(linkDistributorsNode.toString(), Object.class);
linkDistributors = mapper.writeValueAsString(json);
arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class);
}
Connection subscriberConnection = EnterpriseFiler.openConnection(config);
List<Long> patientIds = new ArrayList<>();
Map<Long, Long> hmOrgIds = new HashMap<>();
Map<Long, Long> hmPersonIds = new HashMap<>();
String sql = "SELECT id, organization_id, person_id FROM patient";
Statement statement = subscriberConnection.createStatement();
statement.setFetchSize(10000);
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
long patientId = rs.getLong(1);
long orgId = rs.getLong(2);
long personId = rs.getLong(3);
patientIds.add(new Long(patientId));
hmOrgIds.put(new Long(patientId), new Long(orgId));
hmPersonIds.put(new Long(patientId), new Long(personId));
}
rs.close();
subscriberConnection.close();
LOG.debug("Found " + patientIds.size() + " patients");
AtomicInteger done = new AtomicInteger();
int pos = 0;
List<Thread> threadList = new ArrayList<>();
for (int i=0; i<threads; i++) {
List<Long> patientSubset = new ArrayList<>();
int count = patientIds.size() / threads;
if (i+1 == threads) {
count = patientIds.size() - pos;
}
for (int j=0; j<count; j++) {
Long patientId = patientIds.get(pos);
patientSubset.add(patientId);
pos ++;
}
FixPseudoIdRunnable runnable = new FixPseudoIdRunnable(subscriberConfig, patientSubset, hmOrgIds, hmPersonIds, done);
Thread t = new Thread(runnable);
t.start();
threadList.add(t);
}
while (true) {
Thread.sleep(5000);
boolean allDone = true;
for (Thread t: threadList) {
if (t.getState() != Thread.State.TERMINATED) {
//if (!t.isAlive()) {
allDone = false;
break;
}
}
if (allDone) {
break;
}
}
LOG.debug("Finished Fixing Pseudo IDs for " + subscriberConfig);
} catch (Throwable t) {
LOG.error("", t);
}
}
static class FixPseudoIdRunnable implements Runnable {
private String subscriberConfig = null;
private List<Long> patientIds = null;
private Map<Long, Long> hmOrgIds = null;
private Map<Long, Long> hmPersonIds = null;
private AtomicInteger done = null;
public FixPseudoIdRunnable(String subscriberConfig, List<Long> patientIds, Map<Long, Long> hmOrgIds, Map<Long, Long> hmPersonIds, AtomicInteger done) {
this.subscriberConfig = subscriberConfig;
this.patientIds = patientIds;
this.hmOrgIds = hmOrgIds;
this.hmPersonIds = hmPersonIds;
this.done = done;
}
@Override
public void run() {
try {
doRun();
} catch (Throwable t) {
LOG.error("", t);
}
}
private void doRun() throws Exception {
JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
Connection subscriberConnection = EnterpriseFiler.openConnection(config);
Statement statement = subscriberConnection.createStatement();
JsonNode saltNode = config.get("pseudonymisation");
ObjectMapper mapper = new ObjectMapper();
Object json = mapper.readValue(saltNode.toString(), Object.class);
String linkDistributors = mapper.writeValueAsString(json);
LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class);
LinkDistributorConfig[] arr = null;
JsonNode linkDistributorsNode = config.get("linkedDistributors");
if (linkDistributorsNode != null) {
json = mapper.readValue(linkDistributorsNode.toString(), Object.class);
linkDistributors = mapper.writeValueAsString(json);
arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class);
}
//PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfig);
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection subscriberTransformConnection = session.connection();
Statement subscriberTransformStatement = subscriberTransformConnection.createStatement();
String sql = null;
ResultSet rs = null;
for (Long patientId: patientIds) {
Long orgId = hmOrgIds.get(patientId);
Long personId = hmPersonIds.get(patientId);
//find service ID
sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId;
rs = subscriberTransformStatement.executeQuery(sql);
if (!rs.next()) {
throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId);
}
String serviceId = rs.getString(1);
rs.close();
//find patient ID
sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId;
rs = subscriberTransformStatement.executeQuery(sql);
if (!rs.next()) {
throw new Exception("Failed to find resource iD for patient ID " + patientId);
}
String resourceType = rs.getString(1);
String resourceId = rs.getString(2);
rs.close();
if (!resourceType.equals("Patient")) {
throw new Exception("Not a patient resource type for enterprise ID " + patientId);
}
//get patient
Resource resource = null;
try {
resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.Patient, resourceId);
} catch (Exception ex) {
throw new Exception("Failed to get patient " + resourceId + " for service " + serviceId, ex);
}
if (resource == null) {
LOG.error("Failed to find patient resource for " + ResourceType.Patient + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
continue;
//throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
}
Patient patient = (Patient)resource;
//generate new pseudo ID
String pseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, salt);
//save to person
if (Strings.isNullOrEmpty(pseudoId)) {
sql = "UPDATE person"
+ " SET pseudo_id = null"
+ " WHERE id = " + personId;
statement.executeUpdate(sql);
} else {
sql = "UPDATE person"
+ " SET pseudo_id = '" + pseudoId + "'"
+ " WHERE id = " + personId;
statement.executeUpdate(sql);
}
//save to patient
if (Strings.isNullOrEmpty(pseudoId)) {
sql = "UPDATE patient"
+ " SET pseudo_id = null"
+ " WHERE id = " + patientId;
statement.executeUpdate(sql);
} else {
sql = "UPDATE patient"
+ " SET pseudo_id = '" + pseudoId + "'"
+ " WHERE id = " + patientId;
statement.executeUpdate(sql);
}
//linked distributers
if (arr != null) {
for (LinkDistributorConfig linked: arr) {
String linkedPseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, linked);
sql = "INSERT INTO link_distributor (source_skid, target_salt_key_name, target_skid) VALUES ('" + pseudoId + "', '" + linked.getSaltKeyName() + "', '" + linkedPseudoId + "')"
+ " ON DUPLICATE KEY UPDATE"
+ " target_salt_key_name = VALUES(target_salt_key_name),"
+ " target_skid = VALUES(target_skid)";
statement.executeUpdate(sql);
}
}
//save to subscriber transform
sql = "DELETE FROM pseudo_id_map WHERE patient_id = '" + resourceId + "'";
subscriberTransformStatement.executeUpdate(sql);
if (!Strings.isNullOrEmpty(pseudoId)) {
sql = "INSERT INTO pseudo_id_map (patient_id, pseudo_id) VALUES ('" + resourceId + "', '" + pseudoId + "')";
subscriberTransformStatement.executeUpdate(sql);
}
subscriberConnection.commit();
subscriberTransformConnection.commit();
int doneLocal = done.incrementAndGet();
if (doneLocal % 1000 == 0) {
LOG.debug("Done " + doneLocal);
}
}
statement.close();
subscriberTransformStatement.close();
subscriberConnection.close();
subscriberTransformConnection.close();
}
}*/
/*private static void fixDeceasedPatients(String subscriberConfig) {
LOG.debug("Fixing Deceased Patients for " + subscriberConfig);
try {
JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber");
Connection subscriberConnection = EnterpriseFiler.openConnection(config);
Map<Long, Long> patientIds = new HashMap<>();
String sql = "SELECT id, organization_id FROM patient WHERE date_of_death IS NOT NULL";
Statement statement = subscriberConnection.createStatement();
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
long patientId = rs.getLong(1);
long orgId = rs.getLong(2);
patientIds.put(new Long(patientId), new Long(orgId));
}
rs.close();
statement.close();
EnterpriseAgeUpdaterlDalI dal = DalProvider.factoryEnterpriseAgeUpdaterlDal(subscriberConfig);
EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig);
SessionImpl session = (SessionImpl) entityManager.getDelegate();
Connection subscriberTransformConnection = session.connection();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
for (Long patientId: patientIds.keySet()) {
Long orgId = patientIds.get(patientId);
statement = subscriberTransformConnection.createStatement();
sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId;
rs = statement.executeQuery(sql);
if (!rs.next()) {
throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId);
}
String serviceId = rs.getString(1);
rs.close();
sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId;
rs = statement.executeQuery(sql);
if (!rs.next()) {
throw new Exception("Failed to find resource iD for patient ID " + patientId);
}
String resourceType = rs.getString(1);
String resourceId = rs.getString(2);
rs.close();
statement.close();
Resource resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.valueOf(resourceType), resourceId);
if (resource == null) {
LOG.error("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
continue;
//throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId);
}
Patient patient = (Patient)resource;
Date dob = patient.getBirthDate();
Date dod = patient.getDeceasedDateTimeType().getValue();
Integer[] ages = dal.calculateAgeValuesAndUpdateTable(patientId, dob, dod);
updateEnterprisePatient(patientId, ages, subscriberConnection);
updateEnterprisePerson(patientId, ages, subscriberConnection);
}
subscriberConnection.close();
subscriberTransformConnection.close();
LOG.debug("Finished Fixing Deceased Patients for " + subscriberConfig);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * Writes the recalculated age columns (years/months/weeks) to the enterprise patient row.
 * A null entry in {@code ages} clears the corresponding column.
 *
 * @param enterprisePatientId enterprise DB id of the patient row to update
 * @param ages                array indexed by EnterpriseAge.UNIT_YEARS/UNIT_MONTHS/UNIT_WEEKS; entries may be null
 * @param connection          open JDBC connection to the enterprise DB; committed on success
 * @throws Exception on any JDBC failure
 */
private static void updateEnterprisePatient(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception {
    //the enterprise patient database isn't managed using hibernate, so we need to simply write a simple update statement
    StringBuilder sb = new StringBuilder();
    sb.append("UPDATE patient SET ");
    sb.append("age_years = ?, ");
    sb.append("age_months = ?, ");
    sb.append("age_weeks = ? ");
    sb.append("WHERE id = ?");
    //try-with-resources so the statement is always closed (it previously leaked);
    //a single executeUpdate replaces the unnecessary one-element batch
    try (PreparedStatement update = connection.prepareStatement(sb.toString())) {
        if (ages[EnterpriseAge.UNIT_YEARS] == null) {
            update.setNull(1, Types.INTEGER);
        } else {
            update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]);
        }
        if (ages[EnterpriseAge.UNIT_MONTHS] == null) {
            update.setNull(2, Types.INTEGER);
        } else {
            update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]);
        }
        if (ages[EnterpriseAge.UNIT_WEEKS] == null) {
            update.setNull(3, Types.INTEGER);
        } else {
            update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]);
        }
        update.setLong(4, enterprisePatientId);
        update.executeUpdate();
    }
    connection.commit();
    LOG.info("Updated patient " + enterprisePatientId + " to ages " + ages[EnterpriseAge.UNIT_YEARS] + " y, " + ages[EnterpriseAge.UNIT_MONTHS] + " m " + ages[EnterpriseAge.UNIT_WEEKS] + " wks");
}
/**
 * Writes the recalculated age columns to the person row linked to the given patient,
 * but only where the patient and person rows share the same pseudo_id.
 * A null entry in {@code ages} clears the corresponding column.
 *
 * @param enterprisePatientId enterprise DB id of the patient whose person row to update
 * @param ages                array indexed by EnterpriseAge.UNIT_YEARS/UNIT_MONTHS/UNIT_WEEKS; entries may be null
 * @param connection          open JDBC connection to the enterprise DB; committed on success
 * @throws Exception on any JDBC failure
 */
private static void updateEnterprisePerson(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception {
    //update the age fields on the person table where the person is for our patient and their pseudo IDs match
    StringBuilder sb = new StringBuilder();
    sb.append("UPDATE patient, person SET ");
    sb.append("person.age_years = ?, ");
    sb.append("person.age_months = ?, ");
    sb.append("person.age_weeks = ? ");
    sb.append("WHERE patient.id = ? ");
    sb.append("AND patient.person_id = person.id ");
    sb.append("AND patient.pseudo_id = person.pseudo_id");
    //try-with-resources so the statement is always closed (it previously leaked);
    //a single executeUpdate replaces the unnecessary one-element batch
    try (PreparedStatement update = connection.prepareStatement(sb.toString())) {
        if (ages[EnterpriseAge.UNIT_YEARS] == null) {
            update.setNull(1, Types.INTEGER);
        } else {
            update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]);
        }
        if (ages[EnterpriseAge.UNIT_MONTHS] == null) {
            update.setNull(2, Types.INTEGER);
        } else {
            update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]);
        }
        if (ages[EnterpriseAge.UNIT_WEEKS] == null) {
            update.setNull(3, Types.INTEGER);
        } else {
            update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]);
        }
        update.setLong(4, enterprisePatientId);
        update.executeUpdate();
    }
    connection.commit();
}
/*private static void testS3Read(String s3BucketName, String keyName, String start, String len) {
LOG.debug("Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes");
try {
AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder
.standard()
.withCredentials(DefaultAWSCredentialsProviderChain.getInstance())
.withRegion(Regions.EU_WEST_2);
AmazonS3 s3Client = clientBuilder.build();
GetObjectRequest request = new GetObjectRequest(s3BucketName, keyName);
long startInt = Long.parseLong(start);
long lenInt = Long.parseLong(len);
long endInt = startInt + lenInt;
request.setRange(startInt, endInt);
long startMs = System.currentTimeMillis();
S3Object object = s3Client.getObject(request);
InputStream inputStream = object.getObjectContent();
InputStreamReader reader = new InputStreamReader(inputStream, Charset.defaultCharset());
StringBuilder sb = new StringBuilder();
char[] buf = new char[100];
while (true) {
int read = reader.read(buf);
if (read == -1
|| sb.length() >= lenInt) {
break;
}
sb.append(buf, 0, read);
}
reader.close();
long endMs = System.currentTimeMillis();
LOG.debug("Read " + sb.toString() + " in " + (endMs - startMs) + " ms");
LOG.debug("Finished Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/* leftover debugging snippets, preserved for reference
   (the original "*""/""/""*" nested-comment toggles left a dangling close-comment at class level):
sql = "SELECT * FROM resource_field_mappings WHERE version = 'a905db26-1357-4710-90ef-474f256567ed';";
PreparedStatement statement1 = mappingConnection.prepareStatement(sql);
sql = "SELECT * FROM resource_field_mappings WHERE version = ?";
PreparedStatement statement1 = mappingConnection.prepareStatement(sql);
statement1.setString(1, resourceType);
statement1.setString(3, resourceVersion);
*/
/*private static void fixBartsPatients(UUID serviceId) {
LOG.debug("Fixing Barts patients at service " + serviceId);
try {
EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl)edsEntityManager.getDelegate();
Connection edsConnection = session.connection();
int checked = 0;
int fixed = 0;
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId + "';";
Statement s = edsConnection.createStatement();
s.setFetchSize(10000); //don't get all rows at once
ResultSet rs = s.executeQuery(sql);
LOG.info("Got raw results back");
while (rs.next()) {
String patientId = rs.getString(1);
ResourceWrapper wrapper = resourceDal.getCurrentVersion(serviceId, ResourceType.Patient.toString(), UUID.fromString(patientId));
if (wrapper == null) {
LOG.error("Failed to get recource current for ID " + patientId);
continue;
}
String oldJson = wrapper.getResourceData();
Patient patient = (Patient)FhirSerializationHelper.deserializeResource(oldJson);
PatientBuilder patientBuilder = new PatientBuilder(patient);
List<String> numbersFromCsv = new ArrayList<>();
if (patient.hasTelecom()) {
for (ContactPoint contactPoint: patient.getTelecom()) {
if (contactPoint.hasId()) {
numbersFromCsv.add(contactPoint.getValue());
}
}
for (String numberFromCsv: numbersFromCsv) {
PPPHOTransformer.removeExistingContactPointWithoutIdByValue(patientBuilder, numberFromCsv);
}
}
List<HumanName> namesFromCsv = new ArrayList<>();
if (patient.hasName()) {
for (HumanName name: patient.getName()) {
if (name.hasId()) {
namesFromCsv.add(name);
}
}
for (HumanName name: namesFromCsv) {
PPNAMTransformer.removeExistingNameWithoutIdByValue(patientBuilder, name);
}
}
List<Address> addressesFromCsv = new ArrayList<>();
if (patient.hasAddress()) {
for (Address address: patient.getAddress()) {
if (address.hasId()) {
addressesFromCsv.add(address);
}
}
for (Address address: addressesFromCsv) {
PPADDTransformer.removeExistingAddressWithoutIdByValue(patientBuilder, address);
}
}
String newJson = FhirSerializationHelper.serializeResource(patient);
if (!newJson.equals(oldJson)) {
wrapper.setResourceData(newJson);
saveResourceWrapper(serviceId, wrapper);
fixed ++;
}
checked ++;
if (checked % 1000 == 0) {
LOG.debug("Checked " + checked + " fixed " + fixed);
}
}
LOG.debug("Checked " + checked + " fixed " + fixed);
rs.close();
s.close();
edsEntityManager.close();
LOG.debug("Finish Fixing Barts patients at service " + serviceId);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * Posts the exchange IDs listed in srcFile to the given RabbitMQ exchange, one at a time,
 * recording each successfully posted ID in a sibling "DONE&lt;name&gt;" file so the job can be
 * re-run and resume from where it left off.
 *
 * @param exchangeName name of the Rabbit exchange to post to
 * @param srcFile      file containing one exchange UUID per line (blank lines skipped)
 * @param throttle     optional maximum messages/second; null means no throttling
 */
private static void postToRabbit(String exchangeName, String srcFile, Integer throttle) {
    LOG.info("Posting to " + exchangeName + " from " + srcFile);
    if (throttle != null) {
        LOG.info("Throttled to " + throttle + " messages/second");
    }

    try {
        File src = new File(srcFile);

        //the DONE file sits next to the source file and holds the IDs already posted
        File dir = src.getParentFile();
        String name = "DONE" + src.getName();
        File dst = new File(dir, name);

        Set<UUID> hsAlreadyDone = new HashSet<>();
        if (dst.exists()) {
            List<String> doneLines = Files.readAllLines(dst.toPath());
            for (String line : doneLines) {
                if (!Strings.isNullOrEmpty(line)) {
                    try {
                        hsAlreadyDone.add(UUID.fromString(line));
                    } catch (Exception ex) {
                        LOG.error("Skipping line " + line);
                    }
                }
            }
            LOG.info("Already done " + hsAlreadyDone.size());
        }

        List<UUID> exchangeIds = new ArrayList<>();
        int countTotal = 0;

        List<String> lines = Files.readAllLines(src.toPath());
        for (String line : lines) {
            if (!Strings.isNullOrEmpty(line)) {
                try {
                    UUID uuid = UUID.fromString(line);
                    countTotal++;
                    if (!hsAlreadyDone.contains(uuid)) {
                        exchangeIds.add(uuid);
                    }
                } catch (Exception ex) {
                    LOG.error("Skipping line " + line);
                }
            }
        }
        LOG.info("Found " + countTotal + " down to " + exchangeIds.size() + " skipping ones already done, to post to " + exchangeName);

        continueOrQuit();

        //try-with-resources so the DONE file is always flushed and closed, even if
        //posting fails part-way through (the original leaked the writer on that path)
        try (PrintWriter printWriter = new PrintWriter(new FileWriter(dst, true))) {

            long startMs = System.currentTimeMillis();
            int doneThisSecond = 0;

            LOG.info("Posting " + exchangeIds.size() + " to " + exchangeName);

            for (int i = 0; i < exchangeIds.size(); i++) {
                UUID exchangeId = exchangeIds.get(i);

                //post one at a time so each success can be recorded individually
                List<UUID> tmp = new ArrayList<>();
                tmp.add(exchangeId);
                QueueHelper.postToExchange(tmp, exchangeName, null, true);

                printWriter.println(exchangeId.toString());
                printWriter.flush();

                if (i % 5000 == 0) {
                    LOG.debug("Done " + i + " / " + exchangeIds.size());
                }

                if (throttle != null) {
                    doneThisSecond++;
                    if (doneThisSecond > throttle.intValue()) {
                        //quota for this second used up - sleep off the remainder of the second
                        long now = System.currentTimeMillis();
                        long sleep = 1000 - (now - startMs);
                        if (sleep > 0) {
                            Thread.sleep(sleep);
                        }
                        startMs = System.currentTimeMillis();
                        doneThisSecond = 0;
                    }
                }
            }
        }

        LOG.info("Finished Posting to " + exchangeName + " from " + srcFile);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/*private static void postToProtocol(String srcFile) {
LOG.info("Posting to protocol from " + srcFile);
try {
List<UUID> exchangeIds = new ArrayList<>();
List<String> lines = Files.readAllLines(new File(srcFile).toPath());
for (String line: lines) {
if (!Strings.isNullOrEmpty(line)) {
UUID uuid = UUID.fromString(line);
exchangeIds.add(uuid);
}
}
LOG.info("Posting " + exchangeIds.size() + " to Protocol queue");
QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, false);
LOG.info("Finished Posting to protocol from " + srcFile);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * Populates the patient_uprn table on the given subscriber database(s) from the
 * patient_address_uprn table on the core (EDS) database. For each row, the Discovery
 * patient ID is mapped to its subscriber (enterprise) ID, the source service is checked
 * to be an active publisher to this subscriber config, the LSOA code is looked up from
 * the ABP address postcode, and the UPRN is hashed with the subscriber's salt when the
 * subscriber config is pseudonymised.
 *
 * @param subscriberConfigName name of the "db_subscriber" config record to populate
 * @throws Exception declared for callers, though all errors are currently caught and logged
 */
private static void populateSubscriberUprnTable(String subscriberConfigName) throws Exception {
    LOG.info("Populating Subscriber UPRN Table for " + subscriberConfigName);
    try {
        JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber");

        //pseudonymisation settings are nested under a "pseudonymisation" element;
        //its presence means the subscriber DB is pseudonymised
        JsonNode pseudoNode = config.get("pseudonymisation");
        boolean pseudonymised = pseudoNode != null;

        byte[] saltBytes = null;
        if (pseudonymised) {
            JsonNode saltNode = pseudoNode.get("salt");
            String base64Salt = saltNode.asText();
            saltBytes = Base64.getDecoder().decode(base64Salt);
        }

        List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(config);
        for (EnterpriseConnector.ConnectionWrapper connectionWrapper: connectionWrappers) {
            Connection subscriberConnection = connectionWrapper.getConnection();
            LOG.info("Populating " + connectionWrapper);

            //pseudonymised DBs store a hashed UPRN in pseudo_uprn; the two upsert
            //statements were otherwise identical, so only the column name varies
            String uprnColumn = pseudonymised ? "pseudo_uprn" : "uprn";
            String upsertSql = "INSERT INTO patient_uprn"
                    + " (patient_id, organization_id, person_id, lsoa_code, " + uprnColumn + ", qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode)"
                    + " VALUES"
                    + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
                    + " ON DUPLICATE KEY UPDATE"
                    + " organization_id = VALUES(organization_id),"
                    + " person_id = VALUES(person_id),"
                    + " lsoa_code = VALUES(lsoa_code),"
                    + " " + uprnColumn + " = VALUES(" + uprnColumn + "),"
                    + " qualifier = VALUES(qualifier),"
                    + " `algorithm` = VALUES(`algorithm`),"
                    + " `match` = VALUES(`match`),"
                    + " no_address = VALUES(no_address),"
                    + " invalid_address = VALUES(invalid_address),"
                    + " missing_postcode = VALUES(missing_postcode),"
                    + " invalid_postcode = VALUES(invalid_postcode)";

            PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql);
            int inBatch = 0;

            EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager();
            SessionImpl session = (SessionImpl) edsEntityManager.getDelegate();
            Connection edsConnection = session.connection();

            EnterpriseIdDalI enterpriseIdDal = DalProvider.factoryEnterpriseIdDal(subscriberConfigName);
            PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal();
            PostcodeDalI postcodeDal = DalProvider.factoryPostcodeDal();

            int checked = 0;
            int saved = 0;

            //cache of service ID -> whether that service publishes to this subscriber config
            Map<String, Boolean> hmPermittedPublishers = new HashMap<>();

            String sql = "SELECT service_id, patient_id, uprn, qualifier, abp_address, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode FROM patient_address_uprn";
            Statement s = edsConnection.createStatement();
            s.setFetchSize(10000); //don't get all rows at once
            ResultSet rs = s.executeQuery(sql);
            LOG.info("Got raw results back");

            while (rs.next()) {
                int col = 1;
                String serviceId = rs.getString(col++);
                String patientId = rs.getString(col++);
                Long uprn = rs.getLong(col++);
                if (rs.wasNull()) {
                    uprn = null;
                }
                String qualifier = rs.getString(col++);
                String abpAddress = rs.getString(col++);
                String algorithm = rs.getString(col++);
                String match = rs.getString(col++);
                boolean noAddress = rs.getBoolean(col++);
                boolean invalidAddress = rs.getBoolean(col++);
                boolean missingPostcode = rs.getBoolean(col++);
                boolean invalidPostcode = rs.getBoolean(col++);

                //if the patient doesn't exist on this subscriber DB, don't transform this record
                Long subscriberPatientId = enterpriseIdDal.findEnterpriseId(ResourceType.Patient.toString(), patientId);
                if (subscriberPatientId != null) {

                    //because of past mistakes, we have Discovery->Enterprise mappings for patients that
                    //shouldn't, so we also need to check that the service ID is definitely a publisher to this subscriber
                    Boolean isPublisher = hmPermittedPublishers.get(serviceId);
                    if (isPublisher == null) {

                        List<LibraryItem> libraryItems = LibraryRepositoryHelper.getProtocolsByServiceId(serviceId, null); //passing null means don't filter on system ID
                        for (LibraryItem libraryItem : libraryItems) {
                            Protocol protocol = libraryItem.getProtocol();
                            if (protocol.getEnabled() != ProtocolEnabled.TRUE) {
                                continue;
                            }

                            //check to make sure that this service is actually a PUBLISHER to this protocol
                            boolean isProtocolPublisher = false;
                            for (ServiceContract serviceContract : protocol.getServiceContract()) {
                                if (serviceContract.getType().equals(ServiceContractType.PUBLISHER)
                                        && serviceContract.getService().getUuid().equals(serviceId)
                                        && serviceContract.getActive() == ServiceContractActive.TRUE) {
                                    isProtocolPublisher = true;
                                    break;
                                }
                            }
                            if (!isProtocolPublisher) {
                                continue;
                            }

                            //check to see if this subscriber config is a subscriber to this protocol
                            for (ServiceContract serviceContract : protocol.getServiceContract()) {
                                if (serviceContract.getType().equals(ServiceContractType.SUBSCRIBER)
                                        && serviceContract.getActive() == ServiceContractActive.TRUE) {

                                    ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
                                    UUID subscriberServiceId = UUID.fromString(serviceContract.getService().getUuid());
                                    UUID subscriberTechnicalInterfaceId = UUID.fromString(serviceContract.getTechnicalInterface().getUuid());
                                    Service subscriberService = serviceRepository.getById(subscriberServiceId);
                                    List<JsonServiceInterfaceEndpoint> serviceEndpoints = ObjectMapperPool.getInstance().readValue(subscriberService.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {
                                    });
                                    for (JsonServiceInterfaceEndpoint serviceEndpoint : serviceEndpoints) {
                                        if (serviceEndpoint.getTechnicalInterfaceUuid().equals(subscriberTechnicalInterfaceId)) {
                                            String protocolSubscriberConfigName = serviceEndpoint.getEndpoint();
                                            if (protocolSubscriberConfigName.equals(subscriberConfigName)) {
                                                isPublisher = Boolean.TRUE; //Boolean.TRUE rather than deprecated new Boolean(..)
                                                break;
                                            }
                                        }
                                    }
                                }
                            }
                        }

                        if (isPublisher == null) {
                            isPublisher = Boolean.FALSE;
                        }
                        hmPermittedPublishers.put(serviceId, isPublisher);
                    }

                    if (isPublisher.booleanValue()) {
                        Long subscriberOrgId = enterpriseIdDal.findEnterpriseOrganisationId(serviceId);
                        String discoveryPersonId = patientLinkDal.getPersonId(patientId);
                        Long subscriberPersonId = enterpriseIdDal.findOrCreateEnterprisePersonId(discoveryPersonId);

                        //derive the LSOA code from the postcode, which is the last token of the ABP address
                        String lsoaCode = null;
                        if (!Strings.isNullOrEmpty(abpAddress)) {
                            String[] toks = abpAddress.split(" ");
                            String postcode = toks[toks.length - 1];
                            PostcodeLookup postcodeReference = postcodeDal.getPostcodeReference(postcode);
                            if (postcodeReference != null) {
                                lsoaCode = postcodeReference.getLsoaCode();
                            }
                        }

                        col = 1;
                        //NOTE(review): assumes the org and person mappings exist - auto-unboxing
                        //would NPE here if either lookup returned null; confirm that's impossible
                        psUpsert.setLong(col++, subscriberPatientId);
                        psUpsert.setLong(col++, subscriberOrgId);
                        psUpsert.setLong(col++, subscriberPersonId);
                        psUpsert.setString(col++, lsoaCode);
                        if (pseudonymised) {
                            //hash the UPRN with the subscriber's salt rather than storing it raw
                            String pseudoUprn = null;
                            if (uprn != null) {
                                TreeMap<String, String> keys = new TreeMap<>();
                                keys.put("UPRN", "" + uprn);

                                Crypto crypto = new Crypto();
                                crypto.SetEncryptedSalt(saltBytes);
                                pseudoUprn = crypto.GetDigest(keys);
                            }
                            psUpsert.setString(col++, pseudoUprn);
                        } else {
                            if (uprn != null) {
                                psUpsert.setLong(col++, uprn.longValue());
                            } else {
                                psUpsert.setNull(col++, Types.BIGINT);
                            }
                        }
                        psUpsert.setString(col++, qualifier);
                        psUpsert.setString(col++, algorithm);
                        psUpsert.setString(col++, match);
                        psUpsert.setBoolean(col++, noAddress);
                        psUpsert.setBoolean(col++, invalidAddress);
                        psUpsert.setBoolean(col++, missingPostcode);
                        psUpsert.setBoolean(col++, invalidPostcode);

                        psUpsert.addBatch();
                        inBatch++;
                        saved++;
                        if (inBatch >= TransformConfig.instance().getResourceSaveBatchSize()) {
                            psUpsert.executeBatch();
                            subscriberConnection.commit();
                            inBatch = 0;
                        }
                    }
                }

                checked++;
                if (checked % 1000 == 0) {
                    LOG.info("Checked " + checked + " Saved " + saved);
                }
            }

            //flush any remaining part-batch
            if (inBatch > 0) {
                psUpsert.executeBatch();
                subscriberConnection.commit();
            }

            LOG.info("Checked " + checked + " Saved " + saved);

            //close everything (the original leaked rs/s and closed subscriberConnection twice)
            rs.close();
            s.close();
            psUpsert.close();
            edsEntityManager.close();
            subscriberConnection.close();
        }

        LOG.info("Finished Populating Subscriber UPRN Table for " + subscriberConfigName);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/*private static void fixPersonsNoNhsNumber() {
LOG.info("Fixing persons with no NHS number");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
List<Service> services = serviceDal.getAll();
EntityManager entityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection patientSearchConnection = session.connection();
Statement patientSearchStatement = patientSearchConnection.createStatement();
for (Service service: services) {
LOG.info("Doing " + service.getName() + " " + service.getId());
int checked = 0;
int fixedPersons = 0;
int fixedSearches = 0;
String sql = "SELECT patient_id, nhs_number FROM patient_search WHERE service_id = '" + service.getId() + "' AND (nhs_number IS NULL or CHAR_LENGTH(nhs_number) != 10)";
ResultSet rs = patientSearchStatement.executeQuery(sql);
while (rs.next()) {
String patientId = rs.getString(1);
String nhsNumber = rs.getString(2);
//find matched person ID
String personIdSql = "SELECT person_id FROM patient_link WHERE patient_id = '" + patientId + "'";
Statement s = patientSearchConnection.createStatement();
ResultSet rsPersonId = s.executeQuery(personIdSql);
String personId = null;
if (rsPersonId.next()) {
personId = rsPersonId.getString(1);
}
rsPersonId.close();
s.close();
if (Strings.isNullOrEmpty(personId)) {
LOG.error("Patient " + patientId + " has no person ID");
continue;
}
//see whether person ID used NHS number to match
String patientLinkSql = "SELECT nhs_number FROM patient_link_person WHERE person_id = '" + personId + "'";
s = patientSearchConnection.createStatement();
ResultSet rsPatientLink = s.executeQuery(patientLinkSql);
String matchingNhsNumber = null;
if (rsPatientLink.next()) {
matchingNhsNumber = rsPatientLink.getString(1);
}
rsPatientLink.close();
s.close();
//if patient link person has a record for this nhs number, update the person link
if (!Strings.isNullOrEmpty(matchingNhsNumber)) {
String newPersonId = UUID.randomUUID().toString();
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String createdAtStr = sdf.format(new Date());
s = patientSearchConnection.createStatement();
//new record in patient link history
String patientHistorySql = "INSERT INTO patient_link_history VALUES ('" + patientId + "', '" + service.getId() + "', '" + createdAtStr + "', '" + newPersonId + "', '" + personId + "')";
//LOG.debug(patientHistorySql);
s.execute(patientHistorySql);
//update patient link
String patientLinkUpdateSql = "UPDATE patient_link SET person_id = '" + newPersonId + "' WHERE patient_id = '" + patientId + "'";
s.execute(patientLinkUpdateSql);
patientSearchConnection.commit();
s.close();
fixedPersons ++;
}
//if patient search has an invalid NHS number, update it
if (!Strings.isNullOrEmpty(nhsNumber)) {
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(service.getId(), ResourceType.Patient, patientId);
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
patientSearchDal.update(service.getId(), patient);
fixedSearches ++;
}
checked ++;
if (checked % 50 == 0) {
LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches);
}
}
LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches);
rs.close();
}
patientSearchStatement.close();
entityManager.close();
LOG.info("Finished fixing persons with no NHS number");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void checkDeletedObs(UUID serviceId, UUID systemId) {
LOG.info("Checking Observations for " + serviceId);
try {
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
List<ResourceType> potentialResourceTypes = new ArrayList<>();
potentialResourceTypes.add(ResourceType.Procedure);
potentialResourceTypes.add(ResourceType.AllergyIntolerance);
potentialResourceTypes.add(ResourceType.FamilyMemberHistory);
potentialResourceTypes.add(ResourceType.Immunization);
potentialResourceTypes.add(ResourceType.DiagnosticOrder);
potentialResourceTypes.add(ResourceType.Specimen);
potentialResourceTypes.add(ResourceType.DiagnosticReport);
potentialResourceTypes.add(ResourceType.ReferralRequest);
potentialResourceTypes.add(ResourceType.Condition);
potentialResourceTypes.add(ResourceType.Observation);
List<String> subscriberConfigs = new ArrayList<>();
subscriberConfigs.add("ceg_data_checking");
subscriberConfigs.add("ceg_enterprise");
subscriberConfigs.add("hurley_data_checking");
subscriberConfigs.add("hurley_deidentified");
Set<String> observationsNotDeleted = new HashSet<>();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (Exchange exchange : exchanges) {
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
ExchangePayloadFile firstItem = payload.get(0);
//String version = EmisCsvToFhirTransformer.determineVersion(payload);
//if we've reached the point before we process data for this practice, break out
try {
if (!EmisCsvToFhirTransformer.shouldProcessPatientData(payload)) {
break;
}
} catch (TransformException e) {
LOG.info("Skipping exchange containing " + firstItem.getPath());
continue;
}
String name = FilenameUtils.getBaseName(firstItem.getPath());
String[] toks = name.split("_");
String agreementId = toks[4];
LOG.info("Doing exchange containing " + firstItem.getPath());
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true);
Map<UUID, ExchangeBatch> hmBatchesByPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId());
for (ExchangeBatch batch : batches) {
if (batch.getEdsPatientId() != null) {
hmBatchesByPatient.put(batch.getEdsPatientId(), batch);
}
}
for (ExchangePayloadFile item : payload) {
String type = item.getType();
if (type.equals("CareRecord_Observation")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String deleted = record.get("Deleted");
String observationId = record.get("ObservationGuid");
if (deleted.equalsIgnoreCase("true")) {
//if observation was reinstated at some point, skip it
if (observationsNotDeleted.contains(observationId)) {
continue;
}
String patientId = record.get("PatientGuid");
CsvCell patientCell = CsvCell.factoryDummyWrapper(patientId);
CsvCell observationCell = CsvCell.factoryDummyWrapper(observationId);
Set<ResourceType> resourceTypes = org.endeavourhealth.transform.emis.csv.transforms.careRecord.ObservationTransformer.findOriginalTargetResourceTypes(csvHelper, patientCell, observationCell);
for (ResourceType resourceType: resourceTypes) {
//will already have been done OK
if (resourceType == ResourceType.Observation) {
continue;
}
String sourceId = patientId + ":" + observationId;
UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId);
if (uuid == null) {
throw new Exception("Failed to find UUID for " + resourceType + " " + sourceId);
}
LOG.debug("Fixing " + resourceType + " " + uuid);
//create file of IDs to delete for each subscriber DB
for (String subscriberConfig : subscriberConfigs) {
EnterpriseIdDalI subscriberDal = DalProvider.factoryEnterpriseIdDal(subscriberConfig);
Long enterpriseId = subscriberDal.findEnterpriseId(resourceType.toString(), uuid.toString());
if (enterpriseId == null) {
continue;
}
String sql = null;
if (resourceType == ResourceType.AllergyIntolerance) {
sql = "DELETE FROM allergy_intolerance WHERE id = " + enterpriseId;
} else if (resourceType == ResourceType.ReferralRequest) {
sql = "DELETE FROM referral_request WHERE id = " + enterpriseId;
} else {
sql = "DELETE FROM observation WHERE id = " + enterpriseId;
}
sql += "\n";
File f = new File(subscriberConfig + ".sql");
Files.write(f.toPath(), sql.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE);
}
//delete resource if not already done
ResourceWrapper resourceWrapper = resourceDal.getCurrentVersion(serviceId, resourceType.toString(), uuid);
if (resourceWrapper != null && !resourceWrapper.isDeleted()) {
ExchangeBatch batch = hmBatchesByPatient.get(resourceWrapper.getPatientId());
resourceWrapper.setDeleted(true);
resourceWrapper.setResourceData(null);
resourceWrapper.setResourceMetadata("");
resourceWrapper.setExchangeBatchId(batch.getBatchId());
resourceWrapper.setVersion(UUID.randomUUID());
resourceWrapper.setCreatedAt(new Date());
resourceWrapper.setExchangeId(exchange.getId());
resourceDal.delete(resourceWrapper);
}
}
} else {
observationsNotDeleted.add(observationId);
}
}
parser.close();
}
}
}
LOG.info("Finished Checking Observations for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void testBatchInserts(String url, String user, String pass, String num, String batchSizeStr) {
LOG.info("Testing Batch Inserts");
try {
int inserts = Integer.parseInt(num);
int batchSize = Integer.parseInt(batchSizeStr);
LOG.info("Openning Connection");
Properties props = new Properties();
props.setProperty("user", user);
props.setProperty("password", pass);
Connection conn = DriverManager.getConnection(url, props);
//String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?);";
String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?)";
PreparedStatement ps = conn.prepareStatement(sql);
if (batchSize == 1) {
LOG.info("Testing non-batched inserts");
long start = System.currentTimeMillis();
for (int i = 0; i < inserts; i++) {
int col = 1;
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, randomStr());
ps.execute();
}
long end = System.currentTimeMillis();
LOG.info("Done " + inserts + " in " + (end - start) + " ms");
} else {
LOG.info("Testing batched inserts with batch size " + batchSize);
long start = System.currentTimeMillis();
for (int i = 0; i < inserts; i++) {
int col = 1;
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, UUID.randomUUID().toString());
ps.setString(col++, randomStr());
ps.addBatch();
if ((i + 1) % batchSize == 0
|| i + 1 >= inserts) {
ps.executeBatch();
}
}
long end = System.currentTimeMillis();
LOG.info("Done " + inserts + " in " + (end - start) + " ms");
}
ps.close();
conn.close();
LOG.info("Finished Testing Batch Inserts");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/**
 * Generates a pseudo-random string of at least 1,100 characters by concatenating
 * random longs (used to create dummy payloads for the batch-insert tests).
 *
 * @return a string of length >= 1100 made of decimal digits and minus signs
 */
private static String randomStr() {
    //StringBuilder rather than StringBuffer - no synchronisation needed for a local
    StringBuilder sb = new StringBuilder();
    Random r = new Random(System.currentTimeMillis());
    while (sb.length() < 1100) {
        sb.append(r.nextLong());
    }
    return sb.toString();
}
/*private static void fixEmisProblems(UUID serviceId, UUID systemId) {
LOG.info("Fixing Emis Problems for " + serviceId);
try {
Map<String, List<String>> hmReferences = new HashMap<>();
Set<String> patientIds = new HashSet<>();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null);
LOG.info("Caching problem links");
//Go through all files to work out problem children for every problem
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
//String version = EmisCsvToFhirTransformer.determineVersion(payload);
ExchangePayloadFile firstItem = payload.get(0);
String name = FilenameUtils.getBaseName(firstItem.getPath());
String[] toks = name.split("_");
String agreementId = toks[4];
LOG.info("Doing exchange containing " + firstItem.getPath());
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true);
for (ExchangePayloadFile item: payload) {
String type = item.getType();
if (type.equals("CareRecord_Observation")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("ObservationGuid");
String localId = patientId + ":" + observationId;
ResourceType resourceType = ObservationTransformer.findOriginalTargetResourceType(filer, CsvCell.factoryDummyWrapper(patientId), CsvCell.factoryDummyWrapper(observationId));
Reference localReference = ReferenceHelper.createReference(resourceType, localId);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else if (type.equals("Prescribing_DrugRecord")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemObservationGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("DrugRecordGuid");
String localId = patientId + ":" + observationId;
Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, localId);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else if (type.equals("Prescribing_IssueRecord")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String parentProblemId = record.get("ProblemObservationGuid");
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
if (!Strings.isNullOrEmpty(parentProblemId)) {
String observationId = record.get("IssueRecordGuid");
String localId = patientId + ":" + observationId;
Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, localId);
String localProblemId = patientId + ":" + parentProblemId;
Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId);
Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper);
Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper);
String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference);
List<String> problemChildren = hmReferences.get(globalProblemId);
if (problemChildren == null) {
problemChildren = new ArrayList<>();
hmReferences.put(globalProblemId, problemChildren);
}
problemChildren.add(globalReference.getReference());
}
}
parser.close();
} else {
//no problem link
}
}
}
LOG.info("Finished caching problem links, finding " + patientIds.size() + " patients");
int done = 0;
int fixed = 0;
for (String localPatientId: patientIds) {
Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId);
Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer);
String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference);
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), ResourceType.Condition.toString());
for (ResourceWrapper wrapper: wrappers) {
if (wrapper.isDeleted()) {
continue;
}
String originalJson = wrapper.getResourceData();
Condition condition = (Condition)FhirSerializationHelper.deserializeResource(originalJson);
ConditionBuilder conditionBuilder = new ConditionBuilder(condition);
//sort out the nested extension references
Extension outerExtension = ExtensionConverter.findExtension(condition, FhirExtensionUri.PROBLEM_LAST_REVIEWED);
if (outerExtension != null) {
Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_LAST_REVIEWED__PERFORMER);
if (innerExtension != null) {
Reference performerReference = (Reference)innerExtension.getValue();
String value = performerReference.getReference();
if (value.endsWith("}")) {
Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer);
innerExtension.setValue(globalPerformerReference);
}
}
}
//sort out the contained list of children
ContainedListBuilder listBuilder = new ContainedListBuilder(conditionBuilder);
//remove any existing children
listBuilder.removeContainedList();
//add all the new ones we've found
List<String> localChildReferences = hmReferences.get(wrapper.getResourceId().toString());
if (localChildReferences != null) {
for (String localChildReference: localChildReferences) {
Reference reference = ReferenceHelper.createReference(localChildReference);
listBuilder.addContainedListItem(reference);
}
}
//save the updated condition
String newJson = FhirSerializationHelper.serializeResource(condition);
if (!newJson.equals(originalJson)) {
wrapper.setResourceData(newJson);
saveResourceWrapper(serviceId, wrapper);
fixed ++;
}
}
done ++;
if (done % 1000 == 0) {
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
}
}
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
LOG.info("Finished Emis Problems for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixEmisProblems3ForPublisher(String publisher, UUID systemId) {
try {
LOG.info("Doing fix for " + publisher);
String[] done = new String[]{
"01fcfe94-5dfd-4951-b74d-129f874209b0",
"07a267d3-189b-4968-b9b0-547de28edef5",
"0b9601d1-f7ab-4f5d-9f77-1841050f75ab",
"0fd2ff5d-2c25-4707-afe8-707e81a250b8",
"14276da8-c344-4841-a36d-aa38940e78e7",
"158251ca-0e1d-4471-8fae-250b875911e1",
"160131e2-a5ff-49c8-b62e-ae499a096193",
"16490f2b-62ce-44c6-9816-528146272340",
"18fa1bed-b9a0-4d55-a0cc-dfc31831259a",
"19cba169-d41e-424a-812f-575625c72305",
"19ff6a03-25df-4e61-9ab1-4573cfd24729",
"1b3d1627-f49e-4103-92d6-af6016476da3",
"1e198fbb-c9cd-429a-9b50-0f124d0d825c",
"20444fbe-0802-46fc-8203-339a36f52215",
"21e27bf3-8071-48dd-924f-1d8d21f9216f",
"23203e72-a3b0-4577-9942-30f7cdff358e",
"23be1f4a-68ec-4a49-b2ec-aa9109c99dcd",
"2b56033f-a9b4-4bab-bb53-c619bdb38895",
"2ba26f2d-8068-4b77-8e62-431edfc2c2e2",
"2ed89931-0ce7-49ea-88ac-7266b6c03be0",
"3abf8ded-f1b1-495b-9a2d-5d0223e33fa7",
"3b0f6720-2ffd-4f8a-afcd-7e3bb311212d",
"415b509a-cf39-45bc-9acf-7f982a00e159",
"4221276f-a3b0-4992-b426-ec2d8c7347f2",
"49868211-d868-4b55-a201-5acac0be0cc0",
"55fdcbd0-9b2d-493a-b874-865ccc93a156",
"56124545-d266-4da9-ba1f-b3a16edc7f31",
"6c11453b-dbf8-4749-a0ec-ab705920e316"
};
ServiceDalI dal = DalProvider.factoryServiceDal();
List<Service> all = dal.getAll();
for (Service service: all) {
if (service.getPublisherConfigName() != null
&& service.getPublisherConfigName().equals(publisher)) {
boolean alreadyDone = false;
String idStr = service.getId().toString();
for (String doneId: done) {
if (idStr.equalsIgnoreCase(doneId)) {
alreadyDone = true;
break;
}
}
if (alreadyDone) {
continue;
}
fixEmisProblems3(service.getId(), systemId);
}
}
LOG.info("Done fix for " + publisher);
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void fixEmisProblems3(UUID serviceId, UUID systemId) {
LOG.info("Fixing Emis Problems 3 for " + serviceId);
try {
Set<String> patientIds = new HashSet<>();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null);
LOG.info("Finding patients");
//Go through all files to work out problem children for every problem
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
for (int i=exchanges.size()-1; i>=0; i--) {
Exchange exchange = exchanges.get(i);
List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody());
for (ExchangePayloadFile item: payload) {
String type = item.getType();
if (type.equals("Admin_Patient")) {
InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath());
CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT);
Iterator<CSVRecord> iterator = parser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String patientId = record.get("PatientGuid");
patientIds.add(patientId);
}
parser.close();
}
}
}
LOG.info("Finished checking files, finding " + patientIds.size() + " patients");
int done = 0;
int fixed = 0;
for (String localPatientId: patientIds) {
Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId);
Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer);
String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference);
List<ResourceType> potentialResourceTypes = new ArrayList<>();
potentialResourceTypes.add(ResourceType.Procedure);
potentialResourceTypes.add(ResourceType.AllergyIntolerance);
potentialResourceTypes.add(ResourceType.FamilyMemberHistory);
potentialResourceTypes.add(ResourceType.Immunization);
potentialResourceTypes.add(ResourceType.DiagnosticOrder);
potentialResourceTypes.add(ResourceType.Specimen);
potentialResourceTypes.add(ResourceType.DiagnosticReport);
potentialResourceTypes.add(ResourceType.ReferralRequest);
potentialResourceTypes.add(ResourceType.Condition);
potentialResourceTypes.add(ResourceType.Observation);
for (ResourceType resourceType: potentialResourceTypes) {
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), resourceType.toString());
for (ResourceWrapper wrapper : wrappers) {
if (wrapper.isDeleted()) {
continue;
}
String originalJson = wrapper.getResourceData();
DomainResource resource = (DomainResource)FhirSerializationHelper.deserializeResource(originalJson);
//Also go through all observation records and any that have parent observations - these need fixing too???
Extension extension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PARENT_RESOURCE);
if (extension != null) {
Reference reference = (Reference)extension.getValue();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
if (resource instanceof Observation) {
Observation obs = (Observation)resource;
if (obs.hasRelated()) {
for (Observation.ObservationRelatedComponent related: obs.getRelated()) {
if (related.hasTarget()) {
Reference reference = related.getTarget();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
}
if (resource instanceof DiagnosticReport) {
DiagnosticReport diag = (DiagnosticReport)resource;
if (diag.hasResult()) {
for (Reference reference: diag.getResult()) {
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
//Go through all patients, go through all problems, for any child that's Observation, find the true resource type then update and save
if (resource instanceof Condition) {
if (resource.hasContained()) {
for (Resource contained: resource.getContained()) {
if (contained.getId().equals("Items")) {
List_ containedList = (List_)contained;
if (containedList.hasEntry()) {
for (List_.ListEntryComponent entry: containedList.getEntry()) {
Reference reference = entry.getItem();
fixReference(serviceId, filer, reference, potentialResourceTypes);
}
}
}
}
}
//sort out the nested extension references
Extension outerExtension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PROBLEM_RELATED);
if (outerExtension != null) {
Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_RELATED__TARGET);
if (innerExtension != null) {
Reference performerReference = (Reference)innerExtension.getValue();
String value = performerReference.getReference();
if (value.endsWith("}")) {
Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer);
innerExtension.setValue(globalPerformerReference);
}
}
}
}
//save the updated condition
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!newJson.equals(originalJson)) {
wrapper.setResourceData(newJson);
saveResourceWrapper(serviceId, wrapper);
fixed++;
}
}
}
done ++;
if (done % 1000 == 0) {
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
}
}
LOG.info("Done " + done + " patients and fixed " + fixed + " problems");
LOG.info("Finished Emis Problems 3 for " + serviceId);
} catch (Exception ex) {
LOG.error("", ex);
}
}
private static boolean fixReference(UUID serviceId, HasServiceSystemAndExchangeIdI csvHelper, Reference reference, List<ResourceType> potentialResourceTypes) throws Exception {
//if it's already something other than observation, we're OK
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(reference);
if (comps.getResourceType() != ResourceType.Observation) {
return false;
}
Reference sourceReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, reference);
String sourceId = ReferenceHelper.getReferenceId(sourceReference);
String newReferenceValue = findTrueResourceType(serviceId, potentialResourceTypes, sourceId);
if (newReferenceValue == null) {
return false;
}
reference.setReference(newReferenceValue);
return true;
}
private static String findTrueResourceType(UUID serviceId, List<ResourceType> potentials, String sourceId) throws Exception {
ResourceDalI dal = DalProvider.factoryResourceDal();
for (ResourceType resourceType: potentials) {
UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId);
if (uuid == null) {
continue;
}
ResourceWrapper wrapper = dal.getCurrentVersion(serviceId, resourceType.toString(), uuid);
if (wrapper != null) {
return ReferenceHelper.createResourceReference(resourceType, uuid.toString());
}
}
return null;
}*/
*//* } else if (systemUuid.toString().equalsIgnoreCase("e517fa69-348a-45e9-a113-d9b59ad13095")
}*//*
/*private static void convertExchangeBody(UUID systemUuid) {
try {
LOG.info("Converting exchange bodies for system " + systemUuid);
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemUuid, Integer.MAX_VALUE);
if (exchanges.isEmpty()) {
continue;
}
LOG.debug("doing " + service.getName() + " with " + exchanges.size() + " exchanges");
for (Exchange exchange: exchanges) {
String exchangeBody = exchange.getBody();
try {
//already done
ExchangePayloadFile[] files = JsonSerializer.deserialize(exchangeBody, ExchangePayloadFile[].class);
continue;
} catch (JsonSyntaxException ex) {
//if the JSON can't be parsed, then it'll be the old format of body that isn't JSON
}
List<ExchangePayloadFile> newFiles = new ArrayList<>();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
for (String file: files) {
ExchangePayloadFile fileObj = new ExchangePayloadFile();
String fileWithoutSharedStorage = file.substring(TransformConfig.instance().getSharedStoragePath().length()+1);
fileObj.setPath(fileWithoutSharedStorage);
//size
List<FileInfo> fileInfos = FileHelper.listFilesInSharedStorageWithInfo(file);
for (FileInfo info: fileInfos) {
if (info.getFilePath().equals(file)) {
long size = info.getSize();
fileObj.setSize(new Long(size));
}
}
//type
if (systemUuid.toString().equalsIgnoreCase("991a9068-01d3-4ff2-86ed-249bd0541fb3") //live
|| systemUuid.toString().equalsIgnoreCase("55c08fa5-ef1e-4e94-aadc-e3d6adc80774")) { //dev
//emis
String name = FilenameUtils.getName(file);
String[] toks = name.split("_");
String first = toks[1];
String second = toks[2];
fileObj.setType(first + "_" + second);
|| systemUuid.toString().equalsIgnoreCase("b0277098-0b6c-4d9d-86ef-5f399fb25f34")) { //dev
//cerner
String name = FilenameUtils.getName(file);
if (Strings.isNullOrEmpty(name)) {
continue;
}
try {
String type = BartsCsvToFhirTransformer.identifyFileType(name);
fileObj.setType(type);
} catch (Exception ex2) {
throw new Exception("Failed to parse file name " + name + " on exchange " + exchange.getId());
} else {
throw new Exception("Unknown system ID " + systemUuid);
}
newFiles.add(fileObj);
}
String json = JsonSerializer.serialize(newFiles);
exchange.setBody(json);
exchangeDal.save(exchange);
}
}
LOG.info("Finished Converting exchange bodies for system " + systemUuid);
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
*//*LOG.debug(json);
LOG.debug(newJson);*//*
/*private static void fixBartsOrgs(String serviceId) {
try {
LOG.info("Fixing Barts orgs");
ResourceDalI dal = DalProvider.factoryResourceDal();
List<ResourceWrapper> wrappers = dal.getResourcesByService(UUID.fromString(serviceId), ResourceType.Organization.toString());
LOG.debug("Found " + wrappers.size() + " resources");
int done = 0;
int fixed = 0;
for (ResourceWrapper wrapper: wrappers) {
if (!wrapper.isDeleted()) {
List<ResourceWrapper> history = dal.getResourceHistory(UUID.fromString(serviceId), wrapper.getResourceType(), wrapper.getResourceId());
ResourceWrapper mostRecent = history.get(0);
String json = mostRecent.getResourceData();
Organization org = (Organization)FhirSerializationHelper.deserializeResource(json);
String odsCode = IdentifierHelper.findOdsCode(org);
if (Strings.isNullOrEmpty(odsCode)
&& org.hasIdentifier()) {
boolean hasBeenFixed = false;
for (Identifier identifier: org.getIdentifier()) {
if (identifier.getSystem().equals(FhirIdentifierUri.IDENTIFIER_SYSTEM_ODS_CODE)
&& identifier.hasId()) {
odsCode = identifier.getId();
identifier.setValue(odsCode);
identifier.setId(null);
hasBeenFixed = true;
}
}
if (hasBeenFixed) {
String newJson = FhirSerializationHelper.serializeResource(org);
mostRecent.setResourceData(newJson);
LOG.debug("Fixed Organization " + org.getId());
saveResourceWrapper(UUID.fromString(serviceId), mostRecent);
fixed ++;
}
}
}
done ++;
if (done % 100 == 0) {
LOG.debug("Done " + done + ", Fixed " + fixed);
}
}
LOG.debug("Done " + done + ", Fixed " + fixed);
LOG.info("Finished Barts orgs");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void testPreparedStatements(String url, String user, String pass, String serviceId) {
try {
LOG.info("Testing Prepared Statements");
LOG.info("Url: " + url);
LOG.info("user: " + user);
LOG.info("pass: " + pass);
//open connection
Class.forName("com.mysql.cj.jdbc.Driver");
//create connection
Properties props = new Properties();
props.setProperty("user", user);
props.setProperty("password", pass);
Connection conn = DriverManager.getConnection(url, props);
String sql = "SELECT * FROM internal_id_map WHERE service_id = ? AND id_type = ? AND source_id = ?";
long start = System.currentTimeMillis();
for (int i=0; i<10000; i++) {
PreparedStatement ps = null;
try {
ps = conn.prepareStatement(sql);
ps.setString(1, serviceId);
ps.setString(2, "MILLPERSIDtoMRN");
ps.setString(3, UUID.randomUUID().toString());
ResultSet rs = ps.executeQuery();
while (rs.next()) {
//do nothing
}
} finally {
if (ps != null) {
ps.close();
}
}
}
long end = System.currentTimeMillis();
LOG.info("Took " + (end-start) + " ms");
//close connection
conn.close();
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
*//*Encounter encounter = (Encounter)FhirSerializationHelper.deserializeResource(currentState.getResourceData());
saveResourceWrapper(serviceId, currentState);*//*
*//*Resource resource = FhirSerializationHelper.deserializeResource(currentState.getResourceData());
}*//*
*//*Condition condition = (Condition)FhirSerializationHelper.deserializeResource(currentState.getResourceData());
saveResourceWrapper(serviceId, currentState);*//*
/*private static void fixEncounters(String table) {
LOG.info("Fixing encounters from " + table);
try {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm");
Date cutoff = sdf.parse("2018-03-14 11:42");
EntityManager entityManager = ConnectionManager.getAdminEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
List<UUID> serviceIds = new ArrayList<>();
Map<UUID, UUID> hmSystems = new HashMap<>();
String sql = "SELECT service_id, system_id FROM " + table + " WHERE done = 0";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
UUID serviceId = UUID.fromString(rs.getString(1));
UUID systemId = UUID.fromString(rs.getString(2));
serviceIds.add(serviceId);
hmSystems.put(serviceId, systemId);
}
rs.close();
statement.close();
entityManager.close();
for (UUID serviceId: serviceIds) {
UUID systemId = hmSystems.get(serviceId);
LOG.info("Doing service " + serviceId + " and system " + systemId);
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, systemId);
List<UUID> exchangeIdsToProcess = new ArrayList<>();
for (UUID exchangeId: exchangeIds) {
List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId);
for (ExchangeTransformAudit audit: audits) {
Date d = audit.getStarted();
if (d.after(cutoff)) {
exchangeIdsToProcess.add(exchangeId);
break;
}
}
}
Map<String, ReferenceList> consultationNewChildMap = new HashMap<>();
Map<String, ReferenceList> observationChildMap = new HashMap<>();
Map<String, ReferenceList> newProblemChildren = new HashMap<>();
for (UUID exchangeId: exchangeIdsToProcess) {
Exchange exchange = exchangeDal.getExchange(exchangeId);
String[] files = ExchangeHelper.parseExchangeBodyIntoFileList(exchange.getBody());
String version = EmisCsvToFhirTransformer.determineVersion(files);
List<String> interestingFiles = new ArrayList<>();
for (String file: files) {
if (file.indexOf("CareRecord_Consultation") > -1
|| file.indexOf("CareRecord_Observation") > -1
|| file.indexOf("CareRecord_Diary") > -1
|| file.indexOf("Prescribing_DrugRecord") > -1
|| file.indexOf("Prescribing_IssueRecord") > -1
|| file.indexOf("CareRecord_Problem") > -1) {
interestingFiles.add(file);
}
}
files = interestingFiles.toArray(new String[0]);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers);
String dataSharingAgreementGuid = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(parsers);
EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchangeId, dataSharingAgreementGuid, true);
Consultation consultationParser = (Consultation)parsers.get(Consultation.class);
while (consultationParser.nextRecord()) {
CsvCell consultationGuid = consultationParser.getConsultationGuid();
CsvCell patientGuid = consultationParser.getPatientGuid();
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
consultationNewChildMap.put(sourceId, new ReferenceList());
}
Problem problemParser = (Problem)parsers.get(Problem.class);
while (problemParser.nextRecord()) {
CsvCell problemGuid = problemParser.getObservationGuid();
CsvCell patientGuid = problemParser.getPatientGuid();
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
newProblemChildren.put(sourceId, new ReferenceList());
}
//run this pre-transformer to pre-cache some stuff in the csv helper, which
//is needed when working out the resource type that each observation would be saved as
ObservationPreTransformer.transform(version, parsers, null, csvHelper);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
CsvCell observationGuid = observationParser.getObservationGuid();
CsvCell patientGuid = observationParser.getPatientGuid();
String obSourceId = EmisCsvHelper.createUniqueId(patientGuid, observationGuid);
CsvCell codeId = observationParser.getCodeId();
if (codeId.isEmpty()) {
continue;
}
ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper);
UUID obUuid = IdHelper.getEdsResourceId(serviceId, resourceType, obSourceId);
if (obUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + resourceType + " and source ID " + obSourceId);
//resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper);
}
Reference obReference = ReferenceHelper.createReference(resourceType, obUuid.toString());
CsvCell consultationGuid = observationParser.getConsultationGuid();
if (!consultationGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
ReferenceList referenceList = consultationNewChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
consultationNewChildMap.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
CsvCell problemGuid = observationParser.getProblemGuid();
if (!problemGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
CsvCell parentObGuid = observationParser.getParentObservationGuid();
if (!parentObGuid.isEmpty()) {
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, parentObGuid);
ReferenceList referenceList = observationChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
observationChildMap.put(sourceId, referenceList);
}
referenceList.add(obReference);
}
}
Diary diaryParser = (Diary)parsers.get(Diary.class);
while (diaryParser.nextRecord()) {
CsvCell consultationGuid = diaryParser.getConsultationGuid();
if (!consultationGuid.isEmpty()) {
CsvCell diaryGuid = diaryParser.getDiaryGuid();
CsvCell patientGuid = diaryParser.getPatientGuid();
String diarySourceId = EmisCsvHelper.createUniqueId(patientGuid, diaryGuid);
UUID diaryUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.ProcedureRequest, diarySourceId);
if (diaryUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.ProcedureRequest + " and source ID " + diarySourceId);
}
Reference diaryReference = ReferenceHelper.createReference(ResourceType.ProcedureRequest, diaryUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid);
ReferenceList referenceList = consultationNewChildMap.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
consultationNewChildMap.put(sourceId, referenceList);
}
referenceList.add(diaryReference);
}
}
IssueRecord issueRecordParser = (IssueRecord)parsers.get(IssueRecord.class);
while (issueRecordParser.nextRecord()) {
CsvCell problemGuid = issueRecordParser.getProblemObservationGuid();
if (!problemGuid.isEmpty()) {
CsvCell issueRecordGuid = issueRecordParser.getIssueRecordGuid();
CsvCell patientGuid = issueRecordParser.getPatientGuid();
String issueRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, issueRecordGuid);
UUID issueRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationOrder, issueRecordSourceId);
if (issueRecordUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.MedicationOrder + " and source ID " + issueRecordSourceId);
}
Reference issueRecordReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, issueRecordUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(issueRecordReference);
}
}
DrugRecord drugRecordParser = (DrugRecord)parsers.get(DrugRecord.class);
while (drugRecordParser.nextRecord()) {
CsvCell problemGuid = drugRecordParser.getProblemObservationGuid();
if (!problemGuid.isEmpty()) {
CsvCell drugRecordGuid = drugRecordParser.getDrugRecordGuid();
CsvCell patientGuid = drugRecordParser.getPatientGuid();
String drugRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, drugRecordGuid);
UUID drugRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationStatement, drugRecordSourceId);
if (drugRecordUuid == null) {
continue;
//LOG.error("Null observation UUID for resource type " + ResourceType.MedicationStatement + " and source ID " + drugRecordSourceId);
}
Reference drugRecordReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, drugRecordUuid.toString());
String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid);
ReferenceList referenceList = newProblemChildren.get(sourceId);
if (referenceList == null) {
referenceList = new ReferenceList();
newProblemChildren.put(sourceId, referenceList);
}
referenceList.add(drugRecordReference);
}
}
for (AbstractCsvParser parser : parsers.values()) {
try {
parser.close();
} catch (IOException ex) {
//don't worry if this fails, as we're done anyway
}
}
}
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
LOG.info("Found " + consultationNewChildMap.size() + " Encounters to fix");
for (String encounterSourceId: consultationNewChildMap.keySet()) {
ReferenceList childReferences = consultationNewChildMap.get(encounterSourceId);
//map to UUID
UUID encounterId = IdHelper.getEdsResourceId(serviceId, ResourceType.Encounter, encounterSourceId);
if (encounterId == null) {
continue;
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Encounter.toString(), encounterId);
if (history.isEmpty()) {
continue;
//throw new Exception("Empty history for Encounter " + encounterId);
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (wrapper.getResourceData() != null) {
Encounter encounter = (Encounter) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
EncounterBuilder encounterBuilder = new EncounterBuilder(encounter);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder);
List<Reference> previousChildren = containedListBuilder.getContainedListItems();
childReferences.add(previousChildren);
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
EncounterBuilder encounterBuilder = new EncounterBuilder(encounter);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder);
containedListBuilder.addReferences(childReferences);
String newJson = FhirSerializationHelper.serializeResource(encounter);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
}
LOG.info("Found " + observationChildMap.size() + " Parent Observations to fix");
for (String sourceId: observationChildMap.keySet()) {
ReferenceList childReferences = observationChildMap.get(sourceId);
//map to UUID
ResourceType resourceType = null;
UUID resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.Observation, sourceId);
if (resourceId != null) {
resourceType = ResourceType.Observation;
} else {
resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.DiagnosticReport, sourceId);
if (resourceId != null) {
resourceType = ResourceType.DiagnosticReport;
} else {
continue;
}
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceType.toString(), resourceId);
if (history.isEmpty()) {
//throw new Exception("Empty history for " + resourceType + " " + resourceId);
continue;
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (resourceType == ResourceType.Observation) {
if (wrapper.getResourceData() != null) {
Observation observation = (Observation) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
if (observation.hasRelated()) {
for (Observation.ObservationRelatedComponent related : observation.getRelated()) {
Reference reference = related.getTarget();
childReferences.add(reference);
}
}
}
} else {
if (wrapper.getResourceData() != null) {
DiagnosticReport report = (DiagnosticReport) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
if (report.hasResult()) {
for (Reference reference : report.getResult()) {
childReferences.add(reference);
}
}
}
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
boolean changed = false;
if (resourceType == ResourceType.Observation) {
ObservationBuilder resourceBuilder = new ObservationBuilder((Observation)resource);
for (int i=0; i<childReferences.size(); i++) {
Reference reference = childReferences.getReference(i);
if (resourceBuilder.addChildObservation(reference)) {
changed = true;
}
}
} else {
DiagnosticReportBuilder resourceBuilder = new DiagnosticReportBuilder((DiagnosticReport)resource);
for (int i=0; i<childReferences.size(); i++) {
Reference reference = childReferences.getReference(i);
if (resourceBuilder.addResult(reference)) {
changed = true;
}
}
}
if (changed) {
String newJson = FhirSerializationHelper.serializeResource(resource);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
LOG.info("Found " + newProblemChildren.size() + " Problems to fix");
for (String sourceId: newProblemChildren.keySet()) {
ReferenceList childReferences = newProblemChildren.get(sourceId);
//map to UUID
UUID conditionId = IdHelper.getEdsResourceId(serviceId, ResourceType.Condition, sourceId);
if (conditionId == null) {
continue;
}
//get history, which is most recent FIRST
List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Condition.toString(), conditionId);
if (history.isEmpty()) {
continue;
//throw new Exception("Empty history for Condition " + conditionId);
}
ResourceWrapper currentState = history.get(0);
if (currentState.isDeleted()) {
continue;
}
//find last instance prior to cutoff and get its linked children
for (ResourceWrapper wrapper: history) {
Date d = wrapper.getCreatedAt();
if (!d.after(cutoff)) {
if (wrapper.getResourceData() != null) {
Condition previousVersion = (Condition) FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
ConditionBuilder conditionBuilder = new ConditionBuilder(previousVersion);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder);
List<Reference> previousChildren = containedListBuilder.getContainedListItems();
childReferences.add(previousChildren);
}
break;
}
}
if (childReferences.size() == 0) {
continue;
}
String json = currentState.getResourceData();
Resource resource = FhirSerializationHelper.deserializeResource(json);
String newJson = FhirSerializationHelper.serializeResource(resource);
if (!json.equals(newJson)) {
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
saveResourceWrapper(serviceId, currentState);
}
ConditionBuilder conditionBuilder = new ConditionBuilder(condition);
ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder);
containedListBuilder.addReferences(childReferences);
String newJson = FhirSerializationHelper.serializeResource(condition);
currentState.setResourceData(newJson);
currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson));
}
//mark as done
String updateSql = "UPDATE " + table + " SET done = 1 WHERE service_id = '" + serviceId + "';";
entityManager = ConnectionManager.getAdminEntityManager();
session = (SessionImpl)entityManager.getDelegate();
connection = session.connection();
statement = connection.createStatement();
entityManager.getTransaction().begin();
statement.executeUpdate(updateSql);
entityManager.getTransaction().commit();
}
*/
*//*
/**
* For each practice:
* Go through all files processed since 14 March
* Cache all links as above
* Cache all Encounters saved too
* <p>
* For each Encounter referenced at all:
* Retrieve latest version from resource current
* Retrieve version prior to 14 March
* Update current version with old references plus new ones
* <p>
* For each parent observation:
* Retrieve latest version (could be observation or diagnostic report)
* <p>
* For each problem:
* Retrieve latest version from resource current
* Check if still a problem:
* Retrieve version prior to 14 March
* Update current version with old references plus new ones
LOG.info("Finished Fixing encounters from " + table);
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
 * Overwrites the stored JSON (and checksum) of an existing resource in BOTH the
 * resource_current and resource_history tables, matching on the wrapper's identifiers
 * and, for the history table, its version UUID.
 *
 * Uses parameterised statements rather than concatenating the JSON into the SQL, which
 * removes the SQL-injection risk and the need to hand-escape quotes and backslashes.
 * The statement, transaction and entity manager are now always released/rolled back on
 * failure, where previously they leaked.
 *
 * @param serviceId service the resource belongs to (used to select the EHR database)
 * @param wrapper   resource wrapper whose resourceData should be written back; its
 *                  version UUID must be set so the correct history row is updated
 * @throws Exception if the wrapper has no version UUID, or the database update fails
 */
private static void saveResourceWrapper(UUID serviceId, ResourceWrapper wrapper) throws Exception {
    if (wrapper.getVersion() == null) {
        throw new Exception("Can't update resource history without version UUID");
    }

    //recalculate the checksum from the (possibly amended) resource JSON
    if (wrapper.getResourceData() != null) {
        long checksum = FhirStorageService.generateChecksum(wrapper.getResourceData());
        wrapper.setResourceChecksum(Long.valueOf(checksum));
    }

    EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId);
    try {
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection connection = session.connection();

        String json = wrapper.getResourceData();

        //resource_current uses an empty string (not NULL) for admin resources with no patient
        String patientId = "";
        if (wrapper.getPatientId() != null) {
            patientId = wrapper.getPatientId().toString();
        }

        entityManager.getTransaction().begin();
        try {
            String currentSql = "UPDATE resource_current"
                    + " SET resource_data = ?, resource_checksum = ?"
                    + " WHERE service_id = ? AND patient_id = ? AND resource_type = ? AND resource_id = ?";
            try (PreparedStatement ps = connection.prepareStatement(currentSql)) {
                ps.setString(1, json);
                ps.setObject(2, wrapper.getResourceChecksum()); //setObject handles a null checksum
                ps.setString(3, wrapper.getServiceId().toString());
                ps.setString(4, patientId);
                ps.setString(5, wrapper.getResourceType());
                ps.setString(6, wrapper.getResourceId().toString());
                ps.executeUpdate();
            }

            //NOTE: matching on version rather than created_at, since created_at loses
            //sub-second precision (see the commented-out SimpleDateFormat in the history)
            String historySql = "UPDATE resource_history"
                    + " SET resource_data = ?, resource_checksum = ?"
                    + " WHERE resource_id = ? AND resource_type = ? AND version = ?";
            try (PreparedStatement ps = connection.prepareStatement(historySql)) {
                ps.setString(1, json);
                ps.setObject(2, wrapper.getResourceChecksum());
                ps.setString(3, wrapper.getResourceId().toString());
                ps.setString(4, wrapper.getResourceType());
                ps.setString(5, wrapper.getVersion().toString());
                ps.executeUpdate();
            }

            entityManager.getTransaction().commit();
        } catch (Exception ex) {
            //don't leave the transaction open if either update fails
            entityManager.getTransaction().rollback();
            throw ex;
        }
    } finally {
        entityManager.close();
    }
}
/*private static void populateNewSearchTable(String table) {
LOG.info("Populating New Search Table");
try {
EntityManager entityManager = ConnectionManager.getEdsEntityManager();
SessionImpl session = (SessionImpl)entityManager.getDelegate();
Connection connection = session.connection();
Statement statement = connection.createStatement();
List<String> patientIds = new ArrayList<>();
Map<String, String> serviceIds = new HashMap<>();
String sql = "SELECT patient_id, service_id FROM " + table + " WHERE done = 0";
ResultSet rs = statement.executeQuery(sql);
while (rs.next()) {
String patientId = rs.getString(1);
String serviceId = rs.getString(2);
patientIds.add(patientId);
serviceIds.put(patientId, serviceId);
}
rs.close();
statement.close();
entityManager.close();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearch2Dal();
LOG.info("Found " + patientIds.size() + " to do");
for (int i=0; i<patientIds.size(); i++) {
String patientIdStr = patientIds.get(i);
UUID patientId = UUID.fromString(patientIdStr);
String serviceIdStr = serviceIds.get(patientIdStr);
UUID serviceId = UUID.fromString(serviceIdStr);
Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientIdStr);
if (patient != null) {
patientSearchDal.update(serviceId, patient);
//find episode of care
List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, null, patientId, ResourceType.EpisodeOfCare.toString());
for (ResourceWrapper wrapper: wrappers) {
if (!wrapper.isDeleted()) {
EpisodeOfCare episodeOfCare = (EpisodeOfCare)FhirSerializationHelper.deserializeResource(wrapper.getResourceData());
patientSearchDal.update(serviceId, episodeOfCare);
}
}
}
String updateSql = "UPDATE " + table + " SET done = 1 WHERE patient_id = '" + patientIdStr + "' AND service_id = '" + serviceIdStr + "';";
entityManager = ConnectionManager.getEdsEntityManager();
session = (SessionImpl)entityManager.getDelegate();
connection = session.connection();
statement = connection.createStatement();
entityManager.getTransaction().begin();
statement.executeUpdate(updateSql);
entityManager.getTransaction().commit();
if (i % 5000 == 0) {
LOG.info("Done " + (i+1) + " of " + patientIds.size());
}
}
entityManager.close();
LOG.info("Finished Populating New Search Table");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
private static void createBartsSubset(String sourceDir, UUID serviceUuid, UUID systemUuid, String samplePatientsFile) {
LOG.info("Creating Barts Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
createBartsSubsetForFile(sourceDir, serviceUuid, systemUuid, personIds);
LOG.info("Finished Creating Barts Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/*private static void createBartsSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
for (File sourceFile: sourceDir.listFiles()) {
String name = sourceFile.getName();
File destFile = new File(destDir, name);
if (sourceFile.isDirectory()) {
if (!destFile.exists()) {
destFile.mkdirs();
}
LOG.info("Doing dir " + sourceFile);
createBartsSubsetForFile(sourceFile, destFile, personIds);
} else {
//we have some bad partial files in, so ignore them
String ext = FilenameUtils.getExtension(name);
if (ext.equalsIgnoreCase("filepart")) {
continue;
}
//if the file is empty, we still need the empty file in the filtered directory, so just copy it
if (sourceFile.length() == 0) {
LOG.info("Copying empty file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
continue;
}
String baseName = FilenameUtils.getBaseName(name);
String fileType = BartsCsvToFhirTransformer.identifyFileType(baseName);
if (isCerner22File(fileType)) {
LOG.info("Checking 2.2 file " + sourceFile);
if (destFile.exists()) {
destFile.delete();
}
FileReader fr = new FileReader(sourceFile);
BufferedReader br = new BufferedReader(fr);
int lineIndex = -1;
PrintWriter pw = null;
int personIdColIndex = -1;
int expectedCols = -1;
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
lineIndex ++;
if (lineIndex == 0) {
if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
//this file has no headers, so needs hard-coding
personIdColIndex = 5;
} else {
//check headings for PersonID col
String[] toks = line.split("\\|", -1);
expectedCols = toks.length;
for (int i=0; i<expectedCols; i++) {
String col = toks[i];
if (col.equalsIgnoreCase("PERSON_ID")
|| col.equalsIgnoreCase("#PERSON_ID")) {
personIdColIndex = i;
break;
}
}
//if no person ID, then just copy the entire file
if (personIdColIndex == -1) {
br.close();
br = null;
LOG.info(" Copying 2.2 file to " + destFile);
copyFile(sourceFile, destFile);
break;
} else {
LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
}
}
PrintWriter fw = new PrintWriter(destFile);
BufferedWriter bw = new BufferedWriter(fw);
pw = new PrintWriter(bw);
} else {
//filter on personID
String[] toks = line.split("\\|", -1);
if (expectedCols != -1
&& toks.length != expectedCols) {
throw new Exception("Line " + (lineIndex+1) + " has " + toks.length + " cols but expecting " + expectedCols);
} else {
String personId = toks[personIdColIndex];
if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
&& !personIds.contains(personId)) {
continue;
}
}
}
pw.println(line);
}
if (br != null) {
br.close();
}
if (pw != null) {
pw.flush();
pw.close();
}
} else {
//the 2.1 files are going to be a pain to split by patient, so just copy them over
LOG.info("Copying 2.1 file " + sourceFile);
if (!destFile.exists()) {
copyFile(sourceFile, destFile);
}
}
}
}
}*/
/**
 * Filters every file referenced by the service's exchanges down to just the given person IDs.
 * 2.2-format files (pipe-delimited, with a PERSON_ID column) are filtered row by row;
 * empty files and 2.1-format files are copied over unchanged. Filtered output is written
 * to the path recorded in the exchange body, reading from the equivalent path under sourceDir.
 *
 * @param sourceDir   local directory holding the full-size source files
 * @param serviceUuid service whose exchanges should be processed
 * @param systemUuid  system ID of those exchanges
 * @param personIds   person IDs to retain
 * @throws Exception if a file can't be read/written or a row has an unexpected column count
 */
private static void createBartsSubsetForFile(String sourceDir, UUID serviceUuid, UUID systemUuid, Set<String> personIds) throws Exception {

    ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
    List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);

    for (Exchange exchange : exchanges) {

        List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody());

        for (ExchangePayloadFile fileObj : files) {

            //map the shared-storage path recorded in the exchange to the local source copy
            String filePathWithoutSharedStorage = fileObj.getPath().substring(TransformConfig.instance().getSharedStoragePath().length() + 1);
            String sourceFilePath = FilenameUtils.concat(sourceDir, filePathWithoutSharedStorage);
            File sourceFile = new File(sourceFilePath);

            String destFilePath = fileObj.getPath();
            File destFile = new File(destFilePath);

            File destDir = destFile.getParentFile();
            if (!destDir.exists()) {
                destDir.mkdirs();
            }

            //if the file is empty, we still need the empty file in the filtered directory, so just copy it
            if (sourceFile.length() == 0) {
                LOG.info("Copying empty file " + sourceFile);
                if (!destFile.exists()) {
                    copyFile(sourceFile, destFile);
                }
                continue;
            }

            String fileType = fileObj.getType();
            if (!isCerner22File(fileType)) {
                //the 2.1 files are going to be a pain to split by patient, so just copy them over
                LOG.info("Copying 2.1 file " + sourceFile);
                if (!destFile.exists()) {
                    copyFile(sourceFile, destFile);
                }
                continue;
            }

            LOG.info("Checking 2.2 file " + sourceFile);
            if (destFile.exists()) {
                destFile.delete();
            }

            //NOTE(review): FileReader/PrintWriter use the platform default charset,
            //as the original code did - confirm that matches the extract encoding
            BufferedReader br = new BufferedReader(new FileReader(sourceFile));
            PrintWriter pw = null;
            int lineIndex = -1;
            int personIdColIndex = -1;
            int expectedCols = -1;

            try {
                while (true) {
                    String line = br.readLine();
                    if (line == null) {
                        break;
                    }

                    lineIndex++;

                    if (lineIndex == 0) {
                        if (fileType.equalsIgnoreCase("FAMILYHISTORY")) {
                            //this file has no headers, so needs hard-coding
                            personIdColIndex = 5;

                        } else {
                            //check headings for PersonID col
                            String[] toks = line.split("\\|", -1);
                            expectedCols = toks.length;
                            for (int i = 0; i < expectedCols; i++) {
                                String col = toks[i];
                                if (col.equalsIgnoreCase("PERSON_ID")
                                        || col.equalsIgnoreCase("#PERSON_ID")) {
                                    personIdColIndex = i;
                                    break;
                                }
                            }

                            //if no person ID, then just copy the entire file
                            if (personIdColIndex == -1) {
                                br.close(); //release the read handle before copying (close is idempotent)
                                LOG.info(" Copying 2.2 file to " + destFile);
                                copyFile(sourceFile, destFile);
                                break;
                            } else {
                                LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex);
                            }
                        }

                        pw = new PrintWriter(new BufferedWriter(new PrintWriter(destFile)));

                    } else {
                        //filter on personID
                        String[] toks = line.split("\\|", -1);
                        if (expectedCols != -1
                                && toks.length != expectedCols) {
                            throw new Exception("Line " + (lineIndex + 1) + " has " + toks.length + " cols but expecting " + expectedCols);

                        } else {
                            String personId = toks[personIdColIndex];
                            if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes
                                    && !personIds.contains(personId)) {
                                continue;
                            }
                        }
                    }

                    pw.println(line);
                }

            } finally {
                //close in a finally block so handles aren't leaked when an exception
                //is thrown mid-file (the original only closed on the happy path)
                br.close();
                if (pw != null) {
                    pw.flush();
                    pw.close();
                }
            }
        }
    }
}
/**
 * Copies a file to a new location via a buffered stream.
 * Fails (from Files.copy) if the destination already exists.
 *
 * @param src file to copy from
 * @param dst file to copy to; must not already exist
 * @throws Exception if the copy fails
 */
private static void copyFile(File src, File dst) throws Exception {
    //try-with-resources so the input stream is closed even if Files.copy throws
    //(the original leaked the stream on any copy failure)
    try (BufferedInputStream bis = new BufferedInputStream(new FileInputStream(src))) {
        Files.copy(bis, dst.toPath());
    }
}
/**
 * Tests whether the given Cerner extract file type is one of the 2.2-format files
 * (the ones with a PERSON_ID column that can be filtered by patient).
 * Comparison is case-insensitive.
 *
 * @param fileType file type code to test (e.g. "PPATI")
 * @return true if the type is a known 2.2 file type
 * @throws Exception declared for signature compatibility with existing callers; never thrown here
 */
private static boolean isCerner22File(String fileType) throws Exception {
    //table-driven lookup replaces the previous 58-branch if/else chain,
    //keeping the same members and the same equalsIgnoreCase matching
    String[] cerner22FileTypes = {
            "PPATI", "PPREL", "CDSEV", "PPATH", "RTTPE", "AEATT", "AEINV", "AETRE",
            "OPREF", "OPATT", "EALEN", "EALSU", "EALOF", "HPSSP", "IPEPI", "IPWDS",
            "DELIV", "BIRTH", "SCHAC", "APPSL", "DIAGN", "PROCE", "ORDER", "DOCRP",
            "DOCREF", "CNTRQ", "LETRS", "LOREF", "ORGREF", "PRSNLREF", "CVREF", "NOMREF",
            "EALIP", "CLEVE", "ENCNT", "RESREF", "PPNAM", "PPADD", "PPPHO", "PPALI",
            "PPINF", "PPAGP", "SURCC", "SURCP", "SURCA", "SURCD", "PDRES", "PDREF",
            "ABREF", "CEPRS", "ORDDT", "STATREF", "STATA", "ENCINF", "SCHDETAIL",
            "SCHOFFER", "PPGPORG", "FAMILYHISTORY"
    };

    for (String cerner22FileType : cerner22FileTypes) {
        if (fileType.equalsIgnoreCase(cerner22FileType)) {
            return true;
        }
    }
    return false;
}
/*private static void fixSubscriberDbs() {
LOG.info("Fixing Subscriber DBs");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-05-11");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
boolean needsFixing = false;
for (UUID exchangeId: exchangeIds) {
if (!needsFixing) {
List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId);
for (ExchangeTransformAudit audit: transformAudits) {
Date transfromStart = audit.getStarted();
if (!transfromStart.before(dateError)) {
needsFixing = true;
break;
}
}
}
if (!needsFixing) {
continue;
}
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId);
Exchange exchange = exchangeDal.getExchange(exchangeId);
LOG.info(" Posting exchange " + exchangeId + " with " + batches.size() + " batches");
List<UUID> batchIds = new ArrayList<>();
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
UUID batchId = batch.getBatchId();
batchIds.add(batchId);
}
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
}
LOG.info("Finished Fixing Subscriber DBs");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*if (!referral.hasServiceRequested()) {
referral.getServiceRequested().clear();*/
/*private static void fixReferralRequests() {
LOG.info("Fixing Referral Requests");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-04-24");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
boolean needsFixing = false;
Set<UUID> patientIdsToPost = new HashSet<>();
for (UUID exchangeId: exchangeIds) {
if (!needsFixing) {
List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId);
for (ExchangeTransformAudit audit: transformAudits) {
Date transfromStart = audit.getStarted();
if (!transfromStart.before(dateError)) {
needsFixing = true;
break;
}
}
}
if (!needsFixing) {
continue;
}
List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId);
Exchange exchange = exchangeDal.getExchange(exchangeId);
LOG.info("Checking exchange " + exchangeId + " with " + batches.size() + " batches");
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
UUID batchId = batch.getBatchId();
List<ResourceWrapper> wrappers = resourceDal.getResourcesForBatch(serviceId, batchId);
for (ResourceWrapper wrapper: wrappers) {
String resourceType = wrapper.getResourceType();
if (!resourceType.equals(ResourceType.ReferralRequest.toString())
|| wrapper.isDeleted()) {
continue;
}
String json = wrapper.getResourceData();
ReferralRequest referral = (ReferralRequest)FhirSerializationHelper.deserializeResource(json);
continue;
}
CodeableConcept reason = referral.getServiceRequested().get(0);
referral.setReason(reason);
if (!referral.hasReason()) {
continue;
}
CodeableConcept reason = referral.getReason();
referral.setReason(null);
referral.addServiceRequested(reason);
json = FhirSerializationHelper.serializeResource(referral);
wrapper.setResourceData(json);
saveResourceWrapper(serviceId, wrapper);
//add to the set of patients we know need sending on to the protocol queue
patientIdsToPost.add(patientId);
LOG.info("Fixed " + resourceType + " " + wrapper.getResourceId() + " in batch " + batchId);
}
//if our patient has just been fixed or was fixed before, post onto the protocol queue
if (patientIdsToPost.contains(patientId)) {
List<UUID> batchIds = new ArrayList<>();
batchIds.add(batchId);
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
}
}
}
LOG.info("Finished Fixing Referral Requests");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/*private static void applyEmisAdminCaches() {
LOG.info("Applying Emis Admin Caches");
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3");
UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774");
List<Service> services = serviceDal.getAll();
for (Service service: services) {
String endpointsJson = service.getEndpoints();
if (Strings.isNullOrEmpty(endpointsJson)) {
continue;
}
UUID serviceId = service.getId();
LOG.info("Checking " + service.getName() + " " + serviceId);
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
if (!endpointSystemId.equals(emisSystem)
&& !endpointSystemId.equals(emisSystemDev)) {
LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis");
continue;
}
if (!exchangeDal.isServiceStarted(serviceId, endpointSystemId)) {
LOG.info(" Service not started, so skipping");
continue;
}
//get exchanges
List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId);
if (exchangeIds.isEmpty()) {
LOG.info(" No exchanges found, so skipping");
continue;
}
UUID firstExchangeId = exchangeIds.get(0);
List<ExchangeEvent> events = exchangeDal.getExchangeEvents(firstExchangeId);
boolean appliedAdminCache = false;
for (ExchangeEvent event: events) {
if (event.getEventDesc().equals("Applied Emis Admin Resource Cache")) {
appliedAdminCache = true;
}
}
if (appliedAdminCache) {
LOG.info(" Have already applied admin cache, so skipping");
continue;
}
Exchange exchange = exchangeDal.getExchange(firstExchangeId);
String body = exchange.getBody();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(body);
if (files.length == 0) {
LOG.info(" No files in exchange " + firstExchangeId + " so skipping");
continue;
}
String firstFilePath = files[0];
String name = FilenameUtils.getBaseName(firstFilePath); //file name without extension
String[] toks = name.split("_");
if (toks.length != 5) {
throw new TransformException("Failed to extract data sharing agreement GUID from filename " + firstFilePath);
}
String sharingAgreementGuid = toks[4];
List<UUID> batchIds = new ArrayList<>();
TransformError transformError = new TransformError();
FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(firstExchangeId, serviceId, endpointSystemId, transformError, batchIds);
EmisCsvHelper csvHelper = new EmisCsvHelper(fhirResourceFiler.getServiceId(), fhirResourceFiler.getSystemId(),
fhirResourceFiler.getExchangeId(), sharingAgreementGuid,
true);
ExchangeTransformAudit transformAudit = new ExchangeTransformAudit();
transformAudit.setServiceId(serviceId);
transformAudit.setSystemId(endpointSystemId);
transformAudit.setExchangeId(firstExchangeId);
transformAudit.setId(UUID.randomUUID());
transformAudit.setStarted(new Date());
LOG.info(" Going to apply admin resource cache");
csvHelper.applyAdminResourceCache(fhirResourceFiler);
fhirResourceFiler.waitToFinish();
for (UUID batchId: batchIds) {
LOG.info(" Created batch ID " + batchId + " for exchange " + firstExchangeId);
}
transformAudit.setEnded(new Date());
transformAudit.setNumberBatchesCreated(new Integer(batchIds.size()));
boolean hadError = false;
if (transformError.getError().size() > 0) {
transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError));
hadError = true;
}
exchangeDal.save(transformAudit);
//clear down the cache of reference mappings since they won't be of much use for the next Exchange
IdHelper.clearCache();
if (hadError) {
LOG.error(" <<<<<<Error applying resource cache!");
continue;
}
//add the event to say we've applied the cache
AuditWriter.writeExchangeEvent(firstExchangeId, "Applied Emis Admin Resource Cache");
//post that ONE new batch ID onto the protocol queue
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray());
exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr);
PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
component.process(exchange);
}
}
LOG.info("Finished Applying Emis Admin Caches");
} catch (Throwable t) {
LOG.error("", t);
}
}*/
/**
* fixes Emis extract(s) when a practice was disabled then subsequently re-bulked, by
* replacing the "delete" extracts with newly generated deltas that can be processed
* before the re-bulk is done
*/
private static void fixDisabledEmisExtract(String serviceOdsCode, String systemId, String sharedStoragePath, String tempDirParent) {
LOG.info("Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceOdsCode);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Service service = serviceDal.getByLocalIdentifier(serviceOdsCode);
LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId());
/*File tempDirLast = new File(tempDir, "last");
if (!tempDirLast.exists()) {
if (!tempDirLast.mkdirs()) {
throw new Exception("Failed to create temp dir " + tempDirLast);
}
tempDirLast.mkdirs();
}
File tempDirEmpty = new File(tempDir, "empty");
if (!tempDirEmpty.exists()) {
if (!tempDirEmpty.mkdirs()) {
throw new Exception("Failed to create temp dir " + tempDirEmpty);
}
tempDirEmpty.mkdirs();
}*/
String tempDir = FilenameUtils.concat(tempDirParent, serviceOdsCode);
File f = new File(tempDir);
if (f.exists()) {
FileUtils.deleteDirectory(f);
}
UUID serviceUuid = service.getId();
UUID systemUuid = UUID.fromString(systemId);
ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
//get all the exchanges, which are returned in reverse order, most recent first
List<Exchange> exchangesDesc = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE);
Map<Exchange, List<String>> hmExchangeFiles = new HashMap<>();
Map<Exchange, List<String>> hmExchangeFilesWithoutStoragePrefix = new HashMap<>();
//reverse the exchange list and cache the files for each one
List<Exchange> exchanges = new ArrayList<>();
for (int i = exchangesDesc.size() - 1; i >= 0; i
Exchange exchange = exchangesDesc.get(i);
String exchangeBody = exchange.getBody();
String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
//drop out and ignore any exchanges containing the singular bespoke reg status files
if (files.length <= 1) {
continue;
}
//drop out and ignore any exchanges for the left and dead extracts, since we don't
//expect to receive re-bulked data for the dead patients
String firstFile = files[0];
if (firstFile.indexOf("LEFT_AND_DEAD") > -1) {
continue;
}
exchanges.add(exchange);
//populate the map of the files with the shared storage prefix
List<String> fileList = Lists.newArrayList(files);
hmExchangeFiles.put(exchange, fileList);
//populate a map of the same files without the prefix
files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody);
for (int j = 0; j < files.length; j++) {
String file = files[j].substring(sharedStoragePath.length() + 1);
files[j] = file;
}
fileList = Lists.newArrayList(files);
hmExchangeFilesWithoutStoragePrefix.put(exchange, fileList);
}
/*exchanges.sort((o1, o2) -> {
Date d1 = o1.getTimestamp();
Date d2 = o2.getTimestamp();
return d1.compareTo(d2);
});*/
LOG.info("Found " + exchanges.size() + " exchanges and cached their files");
int indexDisabled = -1;
int indexRebulked = -1;
int indexOriginallyBulked = -1;
//go back through them to find the extract where the re-bulk is and when it was disabled (the list is in date order, so we're iterating most-recent first)
for (int i = exchanges.size() - 1; i >= 0; i
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
boolean disabled = isDisabledInSharingAgreementFile(files);
if (disabled) {
indexDisabled = i;
} else {
if (indexDisabled == -1) {
indexRebulked = i;
} else {
//if we've found a non-disabled extract older than the disabled ones,
//then we've gone far enough back
break;
}
}
}
//go back from when disabled to find the previous bulk load (i.e. the first one or one after it was previously not disabled)
for (int i = indexDisabled - 1; i >= 0; i
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
boolean disabled = isDisabledInSharingAgreementFile(files);
if (disabled) {
break;
}
indexOriginallyBulked = i;
}
if (indexOriginallyBulked > -1) {
Exchange exchangeOriginallyBulked = exchanges.get(indexOriginallyBulked);
LOG.info("Originally bulked on " + findExtractDate(exchangeOriginallyBulked, hmExchangeFiles) + " " + exchangeOriginallyBulked.getId());
}
if (indexDisabled > -1) {
Exchange exchangeDisabled = exchanges.get(indexDisabled);
LOG.info("Disabled on " + findExtractDate(exchangeDisabled, hmExchangeFiles) + " " + exchangeDisabled.getId());
}
if (indexRebulked > -1) {
Exchange exchangeRebulked = exchanges.get(indexRebulked);
LOG.info("Rebulked on " + findExtractDate(exchangeRebulked, hmExchangeFiles) + " " + exchangeRebulked.getId());
}
if (indexDisabled == -1
|| indexRebulked == -1
|| indexOriginallyBulked == -1) {
throw new Exception("Failed to find exchanges for original bulk (" + indexOriginallyBulked + ") disabling (" + indexDisabled + ") or re-bulking (" + indexRebulked + ")");
}
//continueOrQuit();
Exchange exchangeRebulked = exchanges.get(indexRebulked);
List<String> rebulkFiles = hmExchangeFiles.get(exchangeRebulked);
List<String> tempFilesCreated = new ArrayList<>();
Set<String> patientGuidsDeletedOrTooOld = new HashSet<>();
for (String rebulkFile : rebulkFiles) {
String fileType = findFileType(rebulkFile);
if (!isPatientFile(fileType)) {
continue;
}
LOG.info("Doing " + fileType);
String guidColumnName = getGuidColumnName(fileType);
//find all the guids in the re-bulk
Set<String> idsInRebulk = new HashSet<>();
InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(rebulkFile);
CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
String[] headers = null;
try {
headers = CsvHelper.getHeaderMapAsArray(csvParser);
Iterator<CSVRecord> iterator = csvParser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
//get the patient and row guid out of the file and cache in our set
String id = record.get("PatientGuid");
if (!Strings.isNullOrEmpty(guidColumnName)) {
id += "//" + record.get(guidColumnName);
}
idsInRebulk.add(id);
}
} finally {
csvParser.close();
}
LOG.info("Found " + idsInRebulk.size() + " IDs in re-bulk file: " + rebulkFile);
//create a replacement file for the exchange the service was disabled
String replacementDisabledFile = null;
Exchange exchangeDisabled = exchanges.get(indexDisabled);
List<String> disabledFiles = hmExchangeFilesWithoutStoragePrefix.get(exchangeDisabled);
for (String s : disabledFiles) {
String disabledFileType = findFileType(s);
if (disabledFileType.equals(fileType)) {
replacementDisabledFile = FilenameUtils.concat(tempDir, s);
File dir = new File(replacementDisabledFile).getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
tempFilesCreated.add(s);
LOG.info("Created replacement file " + replacementDisabledFile);
}
}
FileWriter fileWriter = new FileWriter(replacementDisabledFile);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers));
csvPrinter.flush();
Set<String> pastIdsProcessed = new HashSet<>();
//now go through all files of the same type PRIOR to the service was disabled
//to find any rows that we'll need to explicitly delete because they were deleted while
//the extract was disabled
for (int i = indexDisabled - 1; i >= indexOriginallyBulked; i
Exchange exchange = exchanges.get(i);
String originalFile = null;
List<String> files = hmExchangeFiles.get(exchange);
for (String s : files) {
String originalFileType = findFileType(s);
if (originalFileType.equals(fileType)) {
originalFile = s;
break;
}
}
if (originalFile == null) {
continue;
}
LOG.info(" Reading " + originalFile);
reader = FileHelper.readFileReaderFromSharedStorage(originalFile);
csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
try {
Iterator<CSVRecord> iterator = csvParser.iterator();
while (iterator.hasNext()) {
CSVRecord record = iterator.next();
String patientGuid = record.get("PatientGuid");
//get the patient and row guid out of the file and cache in our set
String uniqueId = patientGuid;
if (!Strings.isNullOrEmpty(guidColumnName)) {
uniqueId += "//" + record.get(guidColumnName);
}
//if we're already handled this record in a more recent extract, then skip it
if (pastIdsProcessed.contains(uniqueId)) {
continue;
}
pastIdsProcessed.add(uniqueId);
//if this ID isn't deleted and isn't in the re-bulk then it means
//it WAS deleted in Emis Web but we didn't receive the delete, because it was deleted
//from Emis Web while the extract feed was disabled
//if the record is deleted, then we won't expect it in the re-bulk
boolean deleted = Boolean.parseBoolean(record.get("Deleted"));
if (deleted) {
//if it's the Patient file, stick the patient GUID in a set so we know full patient record deletes
if (fileType.equals("Admin_Patient")) {
patientGuidsDeletedOrTooOld.add(patientGuid);
}
continue;
}
//if it's not the patient file and we refer to a patient that we know
//has been deleted, then skip this row, since we know we're deleting the entire patient record
if (patientGuidsDeletedOrTooOld.contains(patientGuid)) {
continue;
}
//if the re-bulk contains a record matching this one, then it's OK
if (idsInRebulk.contains(uniqueId)) {
continue;
}
//the rebulk won't contain any data for patients that are now too old (i.e. deducted or deceased > 2 yrs ago),
//so any patient ID in the original files but not in the rebulk can be treated like this and any data for them can be skipped
if (fileType.equals("Admin_Patient")) {
//retrieve the Patient and EpisodeOfCare resource for the patient so we can confirm they are deceased or deducted
ResourceDalI resourceDal = DalProvider.factoryResourceDal();
UUID patientUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.Patient, patientGuid);
if (patientUuid == null) {
throw new Exception("Failed to find patient UUID from GUID [" + patientGuid + "]");
}
Patient patientResource = (Patient) resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.Patient, patientUuid.toString());
if (patientResource.hasDeceased()) {
patientGuidsDeletedOrTooOld.add(patientGuid);
continue;
}
UUID episodeUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.EpisodeOfCare, patientGuid); //we use the patient GUID for the episode too
EpisodeOfCare episodeResource = (EpisodeOfCare) resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.EpisodeOfCare, episodeUuid.toString());
if (episodeResource.hasPeriod()
&& !PeriodHelper.isActive(episodeResource.getPeriod())) {
patientGuidsDeletedOrTooOld.add(patientGuid);
continue;
}
}
//create a new CSV record, carrying over the GUIDs from the original but marking as deleted
String[] newRecord = new String[headers.length];
for (int j = 0; j < newRecord.length; j++) {
String header = headers[j];
if (header.equals("PatientGuid")
|| header.equals("OrganisationGuid")
|| (!Strings.isNullOrEmpty(guidColumnName)
&& header.equals(guidColumnName))) {
String val = record.get(header);
newRecord[j] = val;
} else if (header.equals("Deleted")) {
newRecord[j] = "true";
} else {
newRecord[j] = "";
}
}
csvPrinter.printRecord((Object[]) newRecord);
csvPrinter.flush();
//log out the raw record that's missing from the original
StringBuffer sb = new StringBuffer();
sb.append("Record not in re-bulk: ");
for (int j = 0; j < record.size(); j++) {
if (j > 0) {
sb.append(",");
}
sb.append(record.get(j));
}
LOG.info(sb.toString());
}
} finally {
csvParser.close();
}
}
csvPrinter.flush();
csvPrinter.close();
//also create a version of the CSV file with just the header and nothing else in
for (int i = indexDisabled + 1; i < indexRebulked; i++) {
Exchange ex = exchanges.get(i);
List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex);
for (String s : exchangeFiles) {
String exchangeFileType = findFileType(s);
if (exchangeFileType.equals(fileType)) {
String emptyTempFile = FilenameUtils.concat(tempDir, s);
File dir = new File(emptyTempFile).getParentFile();
if (!dir.exists()) {
if (!dir.mkdirs()) {
throw new Exception("Failed to create directory " + dir);
}
}
fileWriter = new FileWriter(emptyTempFile);
bufferedWriter = new BufferedWriter(fileWriter);
csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers));
csvPrinter.flush();
csvPrinter.close();
tempFilesCreated.add(s);
LOG.info("Created empty file " + emptyTempFile);
}
}
}
}
//we also need to copy the restored sharing agreement file to replace all the period it was disabled
String rebulkedSharingAgreementFile = null;
for (String s : rebulkFiles) {
String fileType = findFileType(s);
if (fileType.equals("Agreements_SharingOrganisation")) {
rebulkedSharingAgreementFile = s;
}
}
for (int i = indexDisabled; i < indexRebulked; i++) {
Exchange ex = exchanges.get(i);
List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex);
for (String s : exchangeFiles) {
String exchangeFileType = findFileType(s);
if (exchangeFileType.equals("Agreements_SharingOrganisation")) {
String replacementFile = FilenameUtils.concat(tempDir, s);
InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkedSharingAgreementFile);
File replacementFileObj = new File(replacementFile);
Files.copy(inputStream, replacementFileObj.toPath());
inputStream.close();
tempFilesCreated.add(s);
}
}
}
//create a script to copy the files into S3
List<String> copyScript = new ArrayList<>();
copyScript.add("#!/bin/bash");
copyScript.add("");
for (String s : tempFilesCreated) {
String localFile = FilenameUtils.concat(tempDir, s);
copyScript.add("sudo aws s3 cp " + localFile + " s3://discoverysftplanding/endeavour/" + s);
}
String scriptFile = FilenameUtils.concat(tempDir, "copy.sh");
FileUtils.writeLines(new File(scriptFile), copyScript);
LOG.info("Finished - written files to " + tempDir);
dumpFileSizes(new File(tempDir));
/*continueOrQuit();
//back up every file where the service was disabled
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
for (String file: files) {
//first download from S3 to the local temp dir
InputStream inputStream = FileHelper.readFileFromSharedStorage(file);
String fileName = FilenameUtils.getName(file);
String tempPath = FilenameUtils.concat(tempDir, fileName);
File downloadDestination = new File(tempPath);
Files.copy(inputStream, downloadDestination.toPath());
//then write back to S3 in a sub-dir of the original file
String backupPath = FilenameUtils.getPath(file);
backupPath = FilenameUtils.concat(backupPath, "Original");
backupPath = FilenameUtils.concat(backupPath, fileName);
FileHelper.writeFileToSharedStorage(backupPath, downloadDestination);
LOG.info("Backed up " + file + " -> " + backupPath);
//delete from temp dir
downloadDestination.delete();
}
}
continueOrQuit();
//copy the new CSV files into the dir where it was disabled
List<String> disabledFiles = hmExchangeFiles.get(exchangeDisabled);
for (String disabledFile: disabledFiles) {
String fileType = findFileType(disabledFile);
if (!isPatientFile(fileType)) {
continue;
}
String tempFile = FilenameUtils.concat(tempDirLast.getAbsolutePath(), fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected temp file " + f);
}
FileHelper.writeFileToSharedStorage(disabledFile, f);
LOG.info("Copied " + tempFile + " -> " + disabledFile);
}
continueOrQuit();
//empty the patient files for any extracts while the service was disabled
for (int i=indexDisabled+1; i<indexRebulked; i++) {
Exchange otherExchangeDisabled = exchanges.get(i);
List<String> otherDisabledFiles = hmExchangeFiles.get(otherExchangeDisabled);
for (String otherDisabledFile: otherDisabledFiles) {
String fileType = findFileType(otherDisabledFile);
if (!isPatientFile(fileType)) {
continue;
}
String tempFile = FilenameUtils.concat(tempDirEmpty.getAbsolutePath(), fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected empty file " + f);
}
FileHelper.writeFileToSharedStorage(otherDisabledFile, f);
LOG.info("Copied " + tempFile + " -> " + otherDisabledFile);
}
}
continueOrQuit();
//copy the content of the sharing agreement file from when it was re-bulked
for (String rebulkFile: rebulkFiles) {
String fileType = findFileType(rebulkFile);
if (fileType.equals("Agreements_SharingOrganisation")) {
String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv");
File downloadDestination = new File(tempFile);
InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkFile);
Files.copy(inputStream, downloadDestination.toPath());
tempFilesCreated.add(tempFile);
}
}
//replace the sharing agreement file for all disabled extracts with the non-disabled one
for (int i=indexDisabled; i<indexRebulked; i++) {
Exchange exchange = exchanges.get(i);
List<String> files = hmExchangeFiles.get(exchange);
for (String file: files) {
String fileType = findFileType(file);
if (fileType.equals("Agreements_SharingOrganisation")) {
String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv");
File f = new File(tempFile);
if (!f.exists()) {
throw new Exception("Failed to find expected empty file " + f);
}
FileHelper.writeFileToSharedStorage(file, f);
LOG.info("Copied " + tempFile + " -> " + file);
}
}
}
LOG.info("Finished Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId);
continueOrQuit();
for (String tempFileCreated: tempFilesCreated) {
File f = new File(tempFileCreated);
if (f.exists()) {
f.delete();
}
}*/
} catch (Exception ex) {
LOG.error("", ex);
}
}
private static void dumpFileSizes(File f) {
if (f.isDirectory()) {
for (File child : f.listFiles()) {
dumpFileSizes(child);
}
} else {
String totalSizeReadable = FileUtils.byteCountToDisplaySize(f.length());
LOG.info("" + f + " = " + totalSizeReadable);
}
}
private static String findExtractDate(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception {
List<String> files = fileMap.get(exchange);
String file = findSharingAgreementFile(files);
String name = FilenameUtils.getBaseName(file);
String[] toks = name.split("_");
return toks[3];
}
private static boolean isDisabledInSharingAgreementFile(List<String> files) throws Exception {
String file = findSharingAgreementFile(files);
InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(file);
CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT);
try {
Iterator<CSVRecord> iterator = csvParser.iterator();
CSVRecord record = iterator.next();
String s = record.get("Disabled");
boolean disabled = Boolean.parseBoolean(s);
return disabled;
} finally {
csvParser.close();
}
}
private static void continueOrQuit() throws Exception {
LOG.info("Enter y to continue, anything else to quit");
byte[] bytes = new byte[10];
System.in.read(bytes);
char c = (char) bytes[0];
if (c != 'y' && c != 'Y') {
System.out.println("Read " + c);
System.exit(1);
}
}
private static String getGuidColumnName(String fileType) {
if (fileType.equals("Admin_Patient")) {
//patient file just has patient GUID, nothing extra
return null;
} else if (fileType.equals("CareRecord_Consultation")) {
return "ConsultationGuid";
} else if (fileType.equals("CareRecord_Diary")) {
return "DiaryGuid";
} else if (fileType.equals("CareRecord_Observation")) {
return "ObservationGuid";
} else if (fileType.equals("CareRecord_Problem")) {
//there is no separate problem GUID, as it's just a modified observation
return "ObservationGuid";
} else if (fileType.equals("Prescribing_DrugRecord")) {
return "DrugRecordGuid";
} else if (fileType.equals("Prescribing_IssueRecord")) {
return "IssueRecordGuid";
} else {
throw new IllegalArgumentException(fileType);
}
}
private static String findFileType(String filePath) {
String fileName = FilenameUtils.getName(filePath);
String[] toks = fileName.split("_");
String domain = toks[1];
String name = toks[2];
return domain + "_" + name;
}
private static boolean isPatientFile(String fileType) {
if (fileType.equals("Admin_Patient")
|| fileType.equals("CareRecord_Consultation")
|| fileType.equals("CareRecord_Diary")
|| fileType.equals("CareRecord_Observation")
|| fileType.equals("CareRecord_Problem")
|| fileType.equals("Prescribing_DrugRecord")
|| fileType.equals("Prescribing_IssueRecord")) {
//note the referral file doesn't have a Deleted column, so isn't in this list
return true;
} else {
return false;
}
}
private static String findSharingAgreementFile(List<String> files) throws Exception {
for (String file : files) {
String fileType = findFileType(file);
if (fileType.equals("Agreements_SharingOrganisation")) {
return file;
}
}
throw new Exception("Failed to find sharing agreement file in " + files.get(0));
}
private static void testSlack() {
LOG.info("Testing slack");
try {
SlackHelper.sendSlackMessage(SlackHelper.Channel.QueueReaderAlerts, "Test Message from Queue Reader");
LOG.info("Finished testing slack");
} catch (Exception ex) {
LOG.error("", ex);
}
}
/*private static void postToInboundFromFile(UUID serviceId, UUID systemId, String filePath) {
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
Service service = serviceDalI.getById(serviceId);
LOG.info("Posting to inbound exchange for " + service.getName() + " from file " + filePath);
FileReader fr = new FileReader(filePath);
BufferedReader br = new BufferedReader(fr);
int count = 0;
List<UUID> exchangeIdBatch = new ArrayList<>();
while (true) {
String line = br.readLine();
if (line == null) {
break;
}
UUID exchangeId = UUID.fromString(line);
//update the transform audit, so EDS UI knows we've re-queued this exchange
ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
if (audit != null
&& !audit.isResubmitted()) {
audit.setResubmitted(true);
auditRepository.save(audit);
}
count ++;
exchangeIdBatch.add(exchangeId);
if (exchangeIdBatch.size() >= 1000) {
QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
exchangeIdBatch = new ArrayList<>();
LOG.info("Done " + count);
}
}
if (!exchangeIdBatch.isEmpty()) {
QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
LOG.info("Done " + count);
}
br.close();
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Posting to inbound for " + serviceId);
}*/
/*private static void postToInbound(UUID serviceId, boolean all) {
LOG.info("Posting to inbound for " + serviceId);
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
Service service = serviceDalI.getById(serviceId);
List<UUID> systemIds = findSystemIds(service);
UUID systemId = systemIds.get(0);
ExchangeTransformErrorState errorState = auditRepository.getErrorState(serviceId, systemId);
for (UUID exchangeId: errorState.getExchangeIdsInError()) {
//update the transform audit, so EDS UI knows we've re-queued this exchange
ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
//skip any exchange IDs we've already re-queued up to be processed again
if (audit.isResubmitted()) {
LOG.debug("Not re-posting " + audit.getExchangeId() + " as it's already been resubmitted");
continue;
}
LOG.debug("Re-posting " + audit.getExchangeId());
audit.setResubmitted(true);
auditRepository.save(audit);
//then re-submit the exchange to Rabbit MQ for the queue reader to pick up
QueueHelper.postToExchange(exchangeId, "EdsInbound", null, false);
if (!all) {
LOG.info("Posted first exchange, so stopping");
break;
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Posting to inbound for " + serviceId);
}*/
/*private static void fixPatientSearchAllServices(String filterSystemId) {
LOG.info("Fixing patient search for all services and system " + filterSystemId);
try {
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
List<Service> services = serviceDal.getAll();
for (Service service: services) {
fixPatientSearch(service.getId().toString(), filterSystemId);
}
LOG.info("Finished Fixing patient search for all services and system " + filterSystemId);
} catch (Throwable t) {
LOG.error("", t);
}
}
private static void fixPatientSearch(String serviceId, String filterSystemId) {
LOG.info("Fixing patient search for service " + serviceId);
try {
UUID serviceUuid = UUID.fromString(serviceId);
UUID filterSystemUuid = null;
if (!Strings.isNullOrEmpty(filterSystemId)) {
filterSystemUuid = UUID.fromString(filterSystemId);
}
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
ServiceDalI serviceDal = DalProvider.factoryServiceDal();
Set<UUID> patientsDone = new HashSet<>();
Service service = serviceDal.getById(serviceUuid);
List<UUID> systemIds = findSystemIds(service);
for (UUID systemId: systemIds) {
if (filterSystemUuid != null
&& !filterSystemUuid.equals(systemId)) {
continue;
}
List<UUID> exchanges = exchangeDalI.getExchangeIdsForService(serviceUuid, systemId);
LOG.info("Found " + exchanges.size() + " exchanges for system " + systemId);
for (UUID exchangeId : exchanges) {
List<ExchangeBatch> batches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
LOG.info("Found " + batches.size() + " batches in exchange " + exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId == null) {
continue;
}
if (patientsDone.contains(patientId)) {
continue;
}
patientsDone.add(patientId);
ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceUuid, ResourceType.Patient.toString(), patientId);
if (wrapper != null) {
String json = wrapper.getResourceData();
if (!Strings.isNullOrEmpty(json)) {
Patient fhirPatient = (Patient)FhirSerializationHelper.deserializeResource(json);
patientSearchDal.update(serviceUuid, fhirPatient);
}
}
if (patientsDone.size() % 1000 == 0) {
LOG.info("Done " + patientsDone.size());
}
}
}
}
LOG.info("Done " + patientsDone.size());
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished fixing patient search for " + serviceId);
}*/
    /**
     * Executes the contents of the given file as SQL against the MySQL instance
     * at the given host, using the supplied credentials. Each line of the file
     * is trimmed and executed as a separate statement.
     * NOTE(review): this method appears truncated in this chunk of the source —
     * the statement loop body and the connection/statement cleanup are not
     * visible here and should be confirmed against the full file.
     *
     * @param host     JDBC connection URL of the MySQL server
     * @param username database user
     * @param password database password
     * @param sqlFile  path of the file containing the SQL to run
     */
    private static void runSql(String host, String username, String password, String sqlFile) {
        LOG.info("Running SQL on " + host + " from " + sqlFile);
        Connection conn = null;
        Statement statement = null;
        try {
            File f = new File(sqlFile);
            if (!f.exists()) {
                LOG.error("" + f + " doesn't exist");
                return;
            }
            //each line of the file is treated as one SQL statement
            List<String> lines = FileUtils.readLines(f);
            /*String combined = String.join("\n", lines);
            LOG.info("Going to run SQL");
            LOG.info(combined);*/
            //load driver
            Class.forName("com.mysql.cj.jdbc.Driver");
            //create connection
            Properties props = new Properties();
            props.setProperty("user", username);
            props.setProperty("password", password);
            conn = DriverManager.getConnection(host, props);
            LOG.info("Opened connection");
            statement = conn.createStatement();
            long totalStart = System.currentTimeMillis();
            for (String sql : lines) {
                sql = sql.trim();
                if (sql.startsWith("
/*private static void fixExchangeBatches() {
LOG.info("Starting Fixing Exchange Batches");
try {
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
List<Service> services = serviceDalI.getAll();
for (Service service: services) {
LOG.info("Doing " + service.getName());
List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(service.getId());
for (UUID exchangeId: exchangeIds) {
LOG.info(" Exchange " + exchangeId);
List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch: exchangeBatches) {
if (exchangeBatch.getEdsPatientId() != null) {
continue;
}
List<ResourceWrapper> resources = resourceDalI.getResourcesForBatch(exchangeBatch.getBatchId());
if (resources.isEmpty()) {
continue;
}
ResourceWrapper first = resources.get(0);
UUID patientId = first.getPatientId();
if (patientId != null) {
exchangeBatch.setEdsPatientId(patientId);
exchangeBatchDalI.save(exchangeBatch);
LOG.info("Fixed batch " + exchangeBatch.getBatchId() + " -> " + exchangeBatch.getEdsPatientId());
}
}
}
}
LOG.info("Finished Fixing Exchange Batches");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void exportHl7Encounters(String sourceCsvPath, String outputPath) {
LOG.info("Exporting HL7 Encounters from " + sourceCsvPath + " to " + outputPath);
try {
File sourceFile = new File(sourceCsvPath);
CSVParser csvParser = CSVParser.parse(sourceFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
//"service_id","system_id","nhs_number","patient_id","count"
int count = 0;
HashMap<UUID, List<UUID>> serviceAndSystemIds = new HashMap<>();
HashMap<UUID, Integer> patientIds = new HashMap<>();
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
count ++;
String serviceId = csvRecord.get("service_id");
String systemId = csvRecord.get("system_id");
String patientId = csvRecord.get("patient_id");
UUID serviceUuid = UUID.fromString(serviceId);
List<UUID> systemIds = serviceAndSystemIds.get(serviceUuid);
if (systemIds == null) {
systemIds = new ArrayList<>();
serviceAndSystemIds.put(serviceUuid, systemIds);
}
systemIds.add(UUID.fromString(systemId));
patientIds.put(UUID.fromString(patientId), new Integer(count));
}
csvParser.close();
ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal();
ResourceDalI resourceDalI = DalProvider.factoryResourceDal();
ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal();
ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
ParserPool parser = new ParserPool();
Map<Integer, List<Object[]>> patientRows = new HashMap<>();
SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
for (UUID serviceId: serviceAndSystemIds.keySet()) {
//List<UUID> systemIds = serviceAndSystemIds.get(serviceId);
Service service = serviceDalI.getById(serviceId);
String serviceName = service.getName();
LOG.info("Doing service " + serviceId + " " + serviceName);
List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(serviceId);
LOG.info("Got " + exchangeIds.size() + " exchange IDs to scan");
int exchangeCount = 0;
for (UUID exchangeId: exchangeIds) {
exchangeCount ++;
if (exchangeCount % 1000 == 0) {
LOG.info("Done " + exchangeCount + " exchanges");
}
List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch: exchangeBatches) {
UUID patientId = exchangeBatch.getEdsPatientId();
if (patientId != null
&& !patientIds.containsKey(patientId)) {
continue;
}
Integer patientIdInt = patientIds.get(patientId);
//get encounters for exchange batch
UUID batchId = exchangeBatch.getBatchId();
List<ResourceWrapper> resourceWrappers = resourceDalI.getResourcesForBatch(serviceId, batchId);
for (ResourceWrapper resourceWrapper: resourceWrappers) {
if (resourceWrapper.isDeleted()) {
continue;
}
String resourceType = resourceWrapper.getResourceType();
if (!resourceType.equals(ResourceType.Encounter.toString())) {
continue;
}
LOG.info("Processing " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId());
String json = resourceWrapper.getResourceData();
Encounter fhirEncounter = (Encounter)parser.parse(json);
Date date = null;
if (fhirEncounter.hasPeriod()) {
Period period = fhirEncounter.getPeriod();
if (period.hasStart()) {
date = period.getStart();
}
}
String episodeId = null;
if (fhirEncounter.hasEpisodeOfCare()) {
Reference episodeReference = fhirEncounter.getEpisodeOfCare().get(0);
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(episodeReference);
EpisodeOfCare fhirEpisode = (EpisodeOfCare)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirEpisode != null) {
if (fhirEpisode.hasIdentifier()) {
episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_BARTS_FIN_EPISODE_ID);
if (Strings.isNullOrEmpty(episodeId)) {
episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_HOMERTON_FIN_EPISODE_ID);
}
}
}
}
String adtType = null;
String adtCode = null;
Extension extension = ExtensionConverter.findExtension(fhirEncounter, FhirExtensionUri.HL7_MESSAGE_TYPE);
if (extension != null) {
CodeableConcept codeableConcept = (CodeableConcept) extension.getValue();
Coding hl7MessageTypeCoding = CodeableConceptHelper.findCoding(codeableConcept, FhirUri.CODE_SYSTEM_HL7V2_MESSAGE_TYPE);
if (hl7MessageTypeCoding != null) {
adtType = hl7MessageTypeCoding.getDisplay();
adtCode = hl7MessageTypeCoding.getCode();
}
} else {
//for older formats of the transformed resources, the HL7 message type can only be found from the raw original exchange body
try {
Exchange exchange = exchangeDalI.getExchange(exchangeId);
String exchangeBody = exchange.getBody();
Bundle bundle = (Bundle) FhirResourceHelper.deserialiseResouce(exchangeBody);
for (Bundle.BundleEntryComponent entry: bundle.getEntry()) {
if (entry.getResource() != null
&& entry.getResource() instanceof MessageHeader) {
MessageHeader header = (MessageHeader)entry.getResource();
if (header.hasEvent()) {
Coding coding = header.getEvent();
adtType = coding.getDisplay();
adtCode = coding.getCode();
}
}
}
} catch (Exception ex) {
//if the exchange body isn't a FHIR bundle, then we'll get an error by treating as such, so just ignore them
}
}
String cls = null;
if (fhirEncounter.hasClass_()) {
Encounter.EncounterClass encounterClass = fhirEncounter.getClass_();
if (encounterClass == Encounter.EncounterClass.OTHER
&& fhirEncounter.hasClass_Element()
&& fhirEncounter.getClass_Element().hasExtension()) {
for (Extension classExtension: fhirEncounter.getClass_Element().getExtension()) {
if (classExtension.getUrl().equals(FhirExtensionUri.ENCOUNTER_CLASS)) {
//not 100% of the type of the value, so just append to a String
cls = "" + classExtension.getValue();
}
}
}
if (Strings.isNullOrEmpty(cls)) {
cls = encounterClass.toCode();
}
}
String type = null;
if (fhirEncounter.hasType()) {
//only seem to ever have one type
CodeableConcept codeableConcept = fhirEncounter.getType().get(0);
type = codeableConcept.getText();
}
String status = null;
if (fhirEncounter.hasStatus()) {
Encounter.EncounterState encounterState = fhirEncounter.getStatus();
status = encounterState.toCode();
}
String location = null;
String locationType = null;
if (fhirEncounter.hasLocation()) {
//first location is always the current location
Encounter.EncounterLocationComponent encounterLocation = fhirEncounter.getLocation().get(0);
if (encounterLocation.hasLocation()) {
Reference locationReference = encounterLocation.getLocation();
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(locationReference);
Location fhirLocation = (Location)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirLocation != null) {
if (fhirLocation.hasName()) {
location = fhirLocation.getName();
}
if (fhirLocation.hasType()) {
CodeableConcept typeCodeableConcept = fhirLocation.getType();
if (typeCodeableConcept.hasCoding()) {
Coding coding = typeCodeableConcept.getCoding().get(0);
locationType = coding.getDisplay();
}
}
}
}
}
String clinician = null;
if (fhirEncounter.hasParticipant()) {
//first participant seems to be the interesting one
Encounter.EncounterParticipantComponent encounterParticipant = fhirEncounter.getParticipant().get(0);
if (encounterParticipant.hasIndividual()) {
Reference practitionerReference = encounterParticipant.getIndividual();
ReferenceComponents comps = ReferenceHelper.getReferenceComponents(practitionerReference);
Practitioner fhirPractitioner = (Practitioner)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId());
if (fhirPractitioner != null) {
if (fhirPractitioner.hasName()) {
HumanName name = fhirPractitioner.getName();
clinician = name.getText();
if (Strings.isNullOrEmpty(clinician)) {
clinician = "";
for (StringType s: name.getPrefix()) {
clinician += s.getValueNotNull();
clinician += " ";
}
for (StringType s: name.getGiven()) {
clinician += s.getValueNotNull();
clinician += " ";
}
for (StringType s: name.getFamily()) {
clinician += s.getValueNotNull();
clinician += " ";
}
clinician = clinician.trim();
}
}
}
}
}
Object[] row = new Object[12];
row[0] = serviceName;
row[1] = patientIdInt.toString();
row[2] = sdfOutput.format(date);
row[3] = episodeId;
row[4] = adtCode;
row[5] = adtType;
row[6] = cls;
row[7] = type;
row[8] = status;
row[9] = location;
row[10] = locationType;
row[11] = clinician;
List<Object[]> rows = patientRows.get(patientIdInt);
if (rows == null) {
rows = new ArrayList<>();
patientRows.put(patientIdInt, rows);
}
rows.add(row);
}
}
}
}
String[] outputColumnHeaders = new String[] {"Source", "Patient", "Date", "Episode ID", "ADT Message Code", "ADT Message Type", "Class", "Type", "Status", "Location", "Location Type", "Clinician"};
FileWriter fileWriter = new FileWriter(outputPath);
BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
CSVFormat format = CSVFormat.DEFAULT
.withHeader(outputColumnHeaders)
.withQuote('"');
CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, format);
for (int i=0; i <= count; i++) {
Integer patientIdInt = new Integer(i);
List<Object[]> rows = patientRows.get(patientIdInt);
if (rows != null) {
for (Object[] row: rows) {
csvPrinter.printRecord(row);
}
}
}
csvPrinter.close();
bufferedWriter.close();
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Exporting Encounters from " + sourceCsvPath + " to " + outputPath);
}*/
/*private static void registerShutdownHook() {
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
LOG.info("");
try {
Thread.sleep(5000);
} catch (Throwable ex) {
LOG.error("", ex);
}
LOG.info("Done");
}
});
}*/
private static void findEmisStartDates(String path, String outputPath) {
LOG.info("Finding EMIS Start Dates in " + path + ", writing to " + outputPath);
try {
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH.mm.ss");
Map<String, Date> startDates = new HashMap<>();
Map<String, String> servers = new HashMap<>();
Map<String, String> names = new HashMap<>();
Map<String, String> odsCodes = new HashMap<>();
Map<String, String> cdbNumbers = new HashMap<>();
Map<String, Set<String>> distinctPatients = new HashMap<>();
File root = new File(path);
for (File sftpRoot : root.listFiles()) {
LOG.info("Checking " + sftpRoot);
Map<Date, File> extracts = new HashMap<>();
List<Date> extractDates = new ArrayList<>();
for (File extractRoot : sftpRoot.listFiles()) {
Date d = sdf.parse(extractRoot.getName());
//LOG.info("" + extractRoot.getName() + " -> " + d);
extracts.put(d, extractRoot);
extractDates.add(d);
}
Collections.sort(extractDates);
for (Date extractDate : extractDates) {
File extractRoot = extracts.get(extractDate);
LOG.info("Checking " + extractRoot);
//read the sharing agreements file
//e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv
File sharingAgreementsFile = null;
for (File f : extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("agreements_sharingorganisation") > -1
&& name.endsWith(".csv")) {
sharingAgreementsFile = f;
break;
}
}
if (sharingAgreementsFile == null) {
LOG.info("Null agreements file for " + extractRoot);
continue;
}
CSVParser csvParser = CSVParser.parse(sharingAgreementsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String activated = csvRecord.get("IsActivated");
String disabled = csvRecord.get("Disabled");
servers.put(orgGuid, sftpRoot.getName());
if (activated.equalsIgnoreCase("true")) {
if (disabled.equalsIgnoreCase("false")) {
Date d = sdf.parse(extractRoot.getName());
Date existingDate = startDates.get(orgGuid);
if (existingDate == null) {
startDates.put(orgGuid, d);
}
} else {
if (startDates.containsKey(orgGuid)) {
startDates.put(orgGuid, null);
}
}
}
}
} finally {
csvParser.close();
}
//go through orgs file to get name, ods and cdb codes
File orgsFile = null;
for (File f : extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("admin_organisation_") > -1
&& name.endsWith(".csv")) {
orgsFile = f;
break;
}
}
csvParser = CSVParser.parse(orgsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String name = csvRecord.get("OrganisationName");
String odsCode = csvRecord.get("ODSCode");
String cdb = csvRecord.get("CDB");
names.put(orgGuid, name);
odsCodes.put(orgGuid, odsCode);
cdbNumbers.put(orgGuid, cdb);
}
} finally {
csvParser.close();
}
//go through patients file to get count
File patientFile = null;
for (File f : extractRoot.listFiles()) {
String name = f.getName().toLowerCase();
if (name.indexOf("admin_patient_") > -1
&& name.endsWith(".csv")) {
patientFile = f;
break;
}
}
csvParser = CSVParser.parse(patientFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
try {
Iterator<CSVRecord> csvIterator = csvParser.iterator();
while (csvIterator.hasNext()) {
CSVRecord csvRecord = csvIterator.next();
String orgGuid = csvRecord.get("OrganisationGuid");
String patientGuid = csvRecord.get("PatientGuid");
String deleted = csvRecord.get("Deleted");
Set<String> distinctPatientSet = distinctPatients.get(orgGuid);
if (distinctPatientSet == null) {
distinctPatientSet = new HashSet<>();
distinctPatients.put(orgGuid, distinctPatientSet);
}
if (deleted.equalsIgnoreCase("true")) {
distinctPatientSet.remove(patientGuid);
} else {
distinctPatientSet.add(patientGuid);
}
}
} finally {
csvParser.close();
}
}
}
SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd");
StringBuilder sb = new StringBuilder();
sb.append("Name,OdsCode,CDB,OrgGuid,StartDate,Server,Patients");
for (String orgGuid : startDates.keySet()) {
Date startDate = startDates.get(orgGuid);
String server = servers.get(orgGuid);
String name = names.get(orgGuid);
String odsCode = odsCodes.get(orgGuid);
String cdbNumber = cdbNumbers.get(orgGuid);
Set<String> distinctPatientSet = distinctPatients.get(orgGuid);
String startDateDesc = null;
if (startDate != null) {
startDateDesc = sdfOutput.format(startDate);
}
Long countDistinctPatients = null;
if (distinctPatientSet != null) {
countDistinctPatients = new Long(distinctPatientSet.size());
}
sb.append("\n");
sb.append("\"" + name + "\"");
sb.append(",");
sb.append("\"" + odsCode + "\"");
sb.append(",");
sb.append("\"" + cdbNumber + "\"");
sb.append(",");
sb.append("\"" + orgGuid + "\"");
sb.append(",");
sb.append(startDateDesc);
sb.append(",");
sb.append("\"" + server + "\"");
sb.append(",");
sb.append(countDistinctPatients);
}
LOG.info(sb.toString());
FileUtils.writeStringToFile(new File(outputPath), sb.toString());
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Finding Start Dates in " + path + ", writing to " + outputPath);
}
/**
 * Scans Emis extract folders under the given path, counting consultation source terms
 * (with their linked clinical code lookups) for consultations dated on/after 2017-01-01,
 * then writes a term/snomed/count summary CSV to outputPath.
 *
 * @param path       root directory containing one sub-folder per sftp reader (e.g. emis001),
 *                   each holding dated extract folders
 * @param outputPath file the aggregated results are written to
 */
private static void findEncounterTerms(String path, String outputPath) {
    LOG.info("Finding Encounter Terms from " + path);

    //map of output line ("term",snomedId,"snomedTerm") -> number of occurrences
    Map<String, Long> hmResults = new HashMap<>();

    try {
        File root = new File(path);
        File[] readerRoots = root.listFiles();
        if (readerRoots == null) {
            //listFiles() returns null if the path doesn't exist or isn't a directory
            LOG.error("Failed to list files in " + root.getAbsolutePath());
            return;
        }

        for (File readerRoot : readerRoots) { //emis001
            LOG.info("Finding terms in " + readerRoot);

            File[] dateFolders = readerRoot.listFiles();
            if (dateFolders == null) {
                LOG.error("Failed to list files in " + readerRoot.getAbsolutePath());
                continue;
            }

            //first read in all the coding files to build up our map of
            //code ID -> "snomedConceptId,\"term\""
            Map<String, String> hmCodes = new HashMap<>();

            for (File dateFolder : dateFolders) {
                LOG.info("Looking for codes in " + dateFolder);

                File f = findFile(dateFolder, "Coding_ClinicalCode");
                if (f == null) {
                    LOG.error("Failed to find coding file in " + dateFolder.getAbsolutePath());
                    continue;
                }

                //try-with-resources ensures the parser is closed even if parsing throws
                try (CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader())) {
                    for (CSVRecord csvRecord : csvParser) {
                        String codeId = csvRecord.get("CodeId");
                        String term = csvRecord.get("Term");
                        String snomed = csvRecord.get("SnomedCTConceptId");
                        hmCodes.put(codeId, snomed + ",\"" + term + "\"");
                    }
                }
            }

            SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
            Date cutoff = dateFormat.parse("2017-01-01");

            //now process the consultation files themselves
            for (File dateFolder : dateFolders) {
                LOG.info("Looking for consultations in " + dateFolder);

                File f = findFile(dateFolder, "CareRecord_Consultation");
                if (f == null) {
                    LOG.error("Failed to find consultation file in " + dateFolder.getAbsolutePath());
                    continue;
                }

                try (CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader())) {
                    for (CSVRecord csvRecord : csvParser) {
                        String term = csvRecord.get("ConsultationSourceTerm");
                        String codeId = csvRecord.get("ConsultationSourceCodeId");

                        //skip consultations with neither a free-text term nor a code
                        if (Strings.isNullOrEmpty(term)
                                && Strings.isNullOrEmpty(codeId)) {
                            continue;
                        }

                        String date = csvRecord.get("EffectiveDate");
                        if (Strings.isNullOrEmpty(date)) {
                            continue;
                        }

                        //only count consultations on or after the cutoff date
                        Date d = dateFormat.parse(date);
                        if (d.before(cutoff)) {
                            continue;
                        }

                        String line = "\"" + term + "\",";

                        if (!Strings.isNullOrEmpty(codeId)) {
                            String codeLookup = hmCodes.get(codeId);
                            if (codeLookup == null) {
                                LOG.error("Failed to find lookup for codeID " + codeId);
                                continue;
                            }

                            line += codeLookup;
                        } else {
                            line += ",";
                        }

                        //increment the count for this term/code combination
                        //(replaces the deprecated new Long(...) box-and-reinsert pattern)
                        hmResults.merge(line, 1L, Long::sum);
                    }
                }
            }
        }

        //save results to file
        StringBuilder output = new StringBuilder();
        output.append("\"consultation term\",\"snomed concept ID\",\"snomed term\",\"count\"");
        output.append("\r\n");

        for (Map.Entry<String, Long> entry : hmResults.entrySet()) {
            output.append(entry.getKey());
            output.append(",");
            output.append(entry.getValue());
            output.append("\r\n");
        }

        LOG.info("Finished");
        LOG.info(output.toString());
        FileUtils.writeStringToFile(new File(outputPath), output.toString());
        LOG.info("written output to " + outputPath);

    } catch (Exception ex) {
        LOG.error("", ex);
    }

    LOG.info("Finished finding Encounter Terms from " + path);
}
/**
 * Returns the first file directly under the given directory whose name contains
 * the given token (case-sensitive), or null if nothing matches.
 *
 * @param root  directory to search (not recursive)
 * @param token substring to look for in each file name
 * @return the first matching file, or null if no match or the directory can't be listed
 */
private static File findFile(File root, String token) throws Exception {
    File[] files = root.listFiles();
    if (files == null) {
        //listFiles() returns null if root doesn't exist or isn't a directory;
        //previously this caused an NPE in the for-loop
        return null;
    }
    for (File f : files) {
        if (f.getName().contains(token)) {
            return f;
        }
    }
    return null;
}
/*private static void populateProtocolQueue(String serviceIdStr, String startingExchangeId) {
LOG.info("Starting Populating Protocol Queue for " + serviceIdStr);
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
if (serviceIdStr.equalsIgnoreCase("All")) {
serviceIdStr = null;
}
try {
List<Service> services = new ArrayList<>();
if (Strings.isNullOrEmpty(serviceIdStr)) {
services = serviceRepository.getAll();
} else {
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
services.add(service);
}
for (Service service: services) {
List<UUID> exchangeIds = auditRepository.getExchangeIdsForService(service.getId());
LOG.info("Found " + exchangeIds.size() + " exchangeIds for " + service.getName());
if (startingExchangeId != null) {
UUID startingExchangeUuid = UUID.fromString(startingExchangeId);
if (exchangeIds.contains(startingExchangeUuid)) {
//if in the list, remove everything up to and including the starting exchange
int index = exchangeIds.indexOf(startingExchangeUuid);
LOG.info("Found starting exchange " + startingExchangeId + " at " + index + " so removing up to this point");
for (int i=index; i>=0; i--) {
exchangeIds.remove(i);
}
startingExchangeId = null;
} else {
//if not in the list, skip all these exchanges
LOG.info("List doesn't contain starting exchange " + startingExchangeId + " so skipping");
continue;
}
}
QueueHelper.postToExchange(exchangeIds, "edsProtocol", null, true);
}
} catch (Exception ex) {
LOG.error("", ex);
}
LOG.info("Finished Populating Protocol Queue for " + serviceIdStr);
}*/
/*private static void findDeletedOrgs() {
LOG.info("Starting finding deleted orgs");
ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();
List<Service> services = new ArrayList<>();
try {
for (Service service: serviceRepository.getAll()) {
services.add(service);
}
} catch (Exception ex) {
LOG.error("", ex);
}
services.sort((o1, o2) -> {
String name1 = o1.getName();
String name2 = o2.getName();
return name1.compareToIgnoreCase(name2);
});
for (Service service: services) {
try {
UUID serviceUuid = service.getId();
List<Exchange> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 1, new Date(0), new Date());
LOG.info("Service: " + service.getName() + " " + service.getLocalId());
if (exchangeByServices.isEmpty()) {
LOG.info(" no exchange found!");
continue;
}
Exchange exchangeByService = exchangeByServices.get(0);
UUID exchangeId = exchangeByService.getId();
Exchange exchange = auditRepository.getExchange(exchangeId);
Map<String, String> headers = exchange.getHeaders();
String systemUuidStr = headers.get(HeaderKeys.SenderSystemUuid);
UUID systemUuid = UUID.fromString(systemUuidStr);
int batches = countBatches(exchangeId, serviceUuid, systemUuid);
LOG.info(" Most recent exchange had " + batches + " batches");
if (batches > 1 && batches < 2000) {
continue;
}
//go back until we find the FIRST exchange where it broke
exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 250, new Date(0), new Date());
for (int i=0; i<exchangeByServices.size(); i++) {
exchangeByService = exchangeByServices.get(i);
exchangeId = exchangeByService.getId();
batches = countBatches(exchangeId, serviceUuid, systemUuid);
exchange = auditRepository.getExchange(exchangeId);
Date timestamp = exchange.getTimestamp();
if (batches < 1 || batches > 2000) {
LOG.info(" " + timestamp + " had " + batches);
}
if (batches > 1 && batches < 2000) {
LOG.info(" " + timestamp + " had " + batches);
break;
}
}
} catch (Exception ex) {
LOG.error("", ex);
}
}
LOG.info("Finished finding deleted orgs");
}*/
/**
 * Totals the number of batches created across every transform audit recorded
 * for the given exchange/service/system combination.
 *
 * @param exchangeId exchange whose transform audits are summed
 * @param serviceId  service the exchange belongs to
 * @param systemId   system the exchange belongs to
 * @return sum of all non-null batch counts over the matching transform audits
 */
private static int countBatches(UUID exchangeId, UUID serviceId, UUID systemId) throws Exception {
    ExchangeDalI auditDal = DalProvider.factoryExchangeDal();

    int total = 0;
    for (ExchangeTransformAudit transformAudit : auditDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId)) {
        //audits with no recorded batch count contribute nothing
        if (transformAudit.getNumberBatchesCreated() != null) {
            total += transformAudit.getNumberBatchesCreated();
        }
    }
    return total;
}
/*private static void fixExchanges(UUID justThisService) {
LOG.info("Fixing exchanges");
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId : exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
boolean changed = false;
String body = exchange.getBody();
String[] files = body.split("\n");
if (files.length == 0) {
continue;
}
for (int i=0; i<files.length; i++) {
String original = files[i];
//remove /r characters
String trimmed = original.trim();
//add the new prefix
if (!trimmed.startsWith("sftpreader/EMIS001/")) {
trimmed = "sftpreader/EMIS001/" + trimmed;
}
if (!original.equals(trimmed)) {
files[i] = trimmed;
changed = true;
}
}
if (changed) {
LOG.info("Fixed exchange " + exchangeId);
LOG.info(body);
body = String.join("\n", files);
exchange.setBody(body);
AuditWriter.writeExchange(exchange);
}
}
}
LOG.info("Fixed exchanges");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void deleteDataForService(UUID serviceId) {
Service dbService = new ServiceRepository().getById(serviceId);
//the delete will take some time, so do the delete in a separate thread
LOG.info("Deleting all data for service " + dbService.getName() + " " + dbService.getId());
FhirDeletionService deletor = new FhirDeletionService(dbService);
try {
deletor.deleteData();
LOG.info("Completed deleting all data for service " + dbService.getName() + " " + dbService.getId());
} catch (Exception ex) {
LOG.error("Error deleting service " + dbService.getName() + " " + dbService.getId(), ex);
}
}*/
/*private static void testLogging() {
while (true) {
System.out.println("Checking logging at " + System.currentTimeMillis());
try {
Thread.sleep(4000);
} catch (Exception e) {
e.printStackTrace();
}
LOG.trace("trace logging");
LOG.debug("debug logging");
LOG.info("info logging");
LOG.warn("warn logging");
LOG.error("error logging");
}
}
*/
/*private static void fixExchangeProtocols() {
LOG.info("Fixing exchange protocols");
AuditRepository auditRepository = new AuditRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.Exchange LIMIT 1000;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
LOG.info("Processing exchange " + exchangeId);
Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceId = UUID.fromString(serviceIdStr);
List<String> newIds = new ArrayList<>();
String protocolJson = headers.get(HeaderKeys.Protocols);
if (!headers.containsKey(HeaderKeys.Protocols)) {
try {
List<LibraryItem> libraryItemList = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr);
// Get protocols where service is publisher
newIds = libraryItemList.stream()
.filter(
libraryItem -> libraryItem.getProtocol().getServiceContract().stream()
.anyMatch(sc ->
sc.getType().equals(ServiceContractType.PUBLISHER)
&& sc.getService().getUuid().equals(serviceIdStr)))
.map(t -> t.getUuid().toString())
.collect(Collectors.toList());
} catch (Exception e) {
LOG.error("Failed to find protocols for exchange " + exchange.getExchangeId(), e);
continue;
}
} else {
try {
JsonNode node = ObjectMapperPool.getInstance().readTree(protocolJson);
for (int i = 0; i < node.size(); i++) {
JsonNode libraryItemNode = node.get(i);
JsonNode idNode = libraryItemNode.get("uuid");
String id = idNode.asText();
newIds.add(id);
}
} catch (Exception e) {
LOG.error("Failed to read Json from " + protocolJson + " for exchange " + exchange.getExchangeId(), e);
continue;
}
}
try {
if (newIds.isEmpty()) {
headers.remove(HeaderKeys.Protocols);
} else {
String protocolsJson = ObjectMapperPool.getInstance().writeValueAsString(newIds.toArray());
headers.put(HeaderKeys.Protocols, protocolsJson);
}
} catch (JsonProcessingException e) {
LOG.error("Unable to serialize protocols to JSON for exchange " + exchange.getExchangeId(), e);
continue;
}
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(headerJson);
} catch (JsonProcessingException e) {
LOG.error("Failed to write exchange headers to Json for exchange " + exchange.getExchangeId(), e);
continue;
}
auditRepository.save(exchange);
}
LOG.info("Finished fixing exchange protocols");
}*/
/*private static void fixExchangeHeaders() {
LOG.info("Fixing exchange headers");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
OrganisationRepository organisationRepository = new OrganisationRepository();
List<Exchange> exchanges = new AuditRepository().getAllExchanges();
for (Exchange exchange: exchanges) {
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
if (headers.containsKey(HeaderKeys.SenderLocalIdentifier)
&& headers.containsKey(HeaderKeys.SenderOrganisationUuid)) {
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
Map<UUID, String> orgMap = service.getOrganisations();
if (orgMap.size() != 1) {
LOG.error("Wrong number of orgs in service " + serviceId + " for exchange " + exchange.getExchangeId());
continue;
}
UUID orgId = orgMap
.keySet()
.stream()
.collect(StreamExtension.firstOrNullCollector());
Organisation organisation = organisationRepository.getById(orgId);
String odsCode = organisation.getNationalId();
headers.put(HeaderKeys.SenderLocalIdentifier, odsCode);
headers.put(HeaderKeys.SenderOrganisationUuid, orgId.toString());
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
LOG.info("Creating exchange " + exchange.getExchangeId());
}
LOG.info("Finished fixing exchange headers");
}*/
/*private static void fixExchangeHeaders() {
LOG.info("Fixing exchange headers");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
OrganisationRepository organisationRepository = new OrganisationRepository();
LibraryRepository libraryRepository = new LibraryRepository();
List<Exchange> exchanges = new AuditRepository().getAllExchanges();
for (Exchange exchange: exchanges) {
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception ex) {
LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex);
continue;
}
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
if (Strings.isNullOrEmpty(serviceIdStr)) {
LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId());
continue;
}
boolean changed = false;
UUID serviceId = UUID.fromString(serviceIdStr);
Service service = serviceRepository.getById(serviceId);
try {
List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint : endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString();
ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId);
Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId());
LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
System system = libraryItem.getSystem();
for (TechnicalInterface technicalInterface : system.getTechnicalInterface()) {
if (endpointInterfaceId.equals(technicalInterface.getUuid())) {
if (!headers.containsKey(HeaderKeys.SourceSystem)) {
headers.put(HeaderKeys.SourceSystem, technicalInterface.getMessageFormat());
changed = true;
}
if (!headers.containsKey(HeaderKeys.SystemVersion)) {
headers.put(HeaderKeys.SystemVersion, technicalInterface.getMessageFormatVersion());
changed = true;
}
if (!headers.containsKey(HeaderKeys.SenderSystemUuid)) {
headers.put(HeaderKeys.SenderSystemUuid, endpointSystemId.toString());
changed = true;
}
}
}
}
} catch (Exception e) {
LOG.error("Failed to find endpoint details for " + exchange.getExchangeId());
continue;
}
if (changed) {
try {
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
LOG.info("Fixed exchange " + exchange.getExchangeId());
}
}
LOG.info("Finished fixing exchange headers");
}*/
/*private static void testConnection(String configName) {
try {
JsonNode config = ConfigManager.getConfigurationAsJson(configName, "enterprise");
String driverClass = config.get("driverClass").asText();
String url = config.get("url").asText();
String username = config.get("username").asText();
String password = config.get("password").asText();
//force the driver to be loaded
Class.forName(driverClass);
Connection conn = DriverManager.getConnection(url, username, password);
conn.setAutoCommit(false);
LOG.info("Connection ok");
conn.close();
} catch (Exception e) {
LOG.error("", e);
}
}*/
/*private static void testConnection() {
try {
JsonNode config = ConfigManager.getConfigurationAsJson("postgres", "enterprise");
String url = config.get("url").asText();
String username = config.get("username").asText();
String password = config.get("password").asText();
//force the driver to be loaded
Class.forName("org.postgresql.Driver");
Connection conn = DriverManager.getConnection(url, username, password);
conn.setAutoCommit(false);
LOG.info("Connection ok");
conn.close();
} catch (Exception e) {
LOG.error("", e);
}
}*/
//if (exchangeId.equals(UUID.fromString("b9b93be0-afd8-11e6-8c16-c1d5a00342f3"))) {
//}
/*private static void startEnterpriseStream(UUID serviceId, String configName, UUID exchangeIdStartFrom, UUID batchIdStartFrom) throws Exception {
LOG.info("Starting Enterprise Streaming for " + serviceId + " using " + configName + " starting from exchange " + exchangeIdStartFrom + " and batch " + batchIdStartFrom);
LOG.info("Testing database connection");
testConnection(configName);
Service service = new ServiceRepository().getById(serviceId);
List<UUID> orgIds = new ArrayList<>(service.getOrganisations().keySet());
UUID orgId = orgIds.get(0);
List<ExchangeByService> exchangeByServiceList = new AuditRepository().getExchangesByService(serviceId, Integer.MAX_VALUE);
for (int i=exchangeByServiceList.size()-1; i>=0; i--) {
ExchangeByService exchangeByService = exchangeByServiceList.get(i);
//for (ExchangeByService exchangeByService: exchangeByServiceList) {
UUID exchangeId = exchangeByService.getExchangeId();
if (exchangeIdStartFrom != null) {
if (!exchangeIdStartFrom.equals(exchangeId)) {
continue;
} else {
//once we have a match, set to null so we don't skip any subsequent ones
exchangeIdStartFrom = null;
}
}
Exchange exchange = AuditWriter.readExchange(exchangeId);
String senderOrgUuidStr = exchange.getHeader(HeaderKeys.SenderOrganisationUuid);
UUID senderOrgUuid = UUID.fromString(senderOrgUuidStr);
//this one had 90,000 batches and doesn't need doing again
LOG.info("Skipping exchange " + exchangeId);
continue;
List<ExchangeBatch> exchangeBatches = new ExchangeBatchRepository().retrieveForExchangeId(exchangeId);
LOG.info("Processing exchange " + exchangeId + " with " + exchangeBatches.size() + " batches");
for (int j=0; j<exchangeBatches.size(); j++) {
ExchangeBatch exchangeBatch = exchangeBatches.get(j);
UUID batchId = exchangeBatch.getBatchId();
if (batchIdStartFrom != null) {
if (!batchIdStartFrom.equals(batchId)) {
continue;
} else {
batchIdStartFrom = null;
}
}
LOG.info("Processing exchange " + exchangeId + " and batch " + batchId + " " + (j+1) + "/" + exchangeBatches.size());
try {
String outbound = FhirToEnterpriseCsvTransformer.transformFromFhir(senderOrgUuid, batchId, null);
if (!Strings.isNullOrEmpty(outbound)) {
EnterpriseFiler.file(outbound, configName);
}
} catch (Exception ex) {
throw new PipelineException("Failed to process exchange " + exchangeId + " and batch " + batchId, ex);
}
}
}
}*/
/*private static void fixMissingExchanges() {
LOG.info("Fixing missing exchanges");
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id, batch_id, inserted_at FROM ehr.exchange_batch LIMIT 600000;");
stmt.setFetchSize(100);
Set<UUID> exchangeIdsDone = new HashSet<>();
AuditRepository auditRepository = new AuditRepository();
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
UUID batchId = row.get(1, UUID.class);
Date date = row.getTimestamp(2);
//LOG.info("Exchange " + exchangeId + " batch " + batchId + " date " + date);
if (exchangeIdsDone.contains(exchangeId)) {
continue;
}
if (auditRepository.getExchange(exchangeId) != null) {
continue;
}
UUID serviceId = findServiceId(batchId, session);
if (serviceId == null) {
continue;
}
Exchange exchange = new Exchange();
ExchangeByService exchangeByService = new ExchangeByService();
ExchangeEvent exchangeEvent = new ExchangeEvent();
Map<String, String> headers = new HashMap<>();
headers.put(HeaderKeys.SenderServiceUuid, serviceId.toString());
String headersJson = null;
try {
headersJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
} catch (JsonProcessingException e) {
//not throwing this exception further up, since it should never happen
//and means we don't need to litter try/catches everywhere this is called from
LOG.error("Failed to write exchange headers to Json", e);
continue;
}
exchange.setBody("Body not available, as exchange re-created");
exchange.setExchangeId(exchangeId);
exchange.setHeaders(headersJson);
exchange.setTimestamp(date);
exchangeByService.setExchangeId(exchangeId);
exchangeByService.setServiceId(serviceId);
exchangeByService.setTimestamp(date);
exchangeEvent.setEventDesc("Created_By_Conversion");
exchangeEvent.setExchangeId(exchangeId);
exchangeEvent.setTimestamp(new Date());
auditRepository.save(exchange);
auditRepository.save(exchangeEvent);
auditRepository.save(exchangeByService);
exchangeIdsDone.add(exchangeId);
LOG.info("Creating exchange " + exchangeId);
}
LOG.info("Finished exchange fix");
}
private static UUID findServiceId(UUID batchId, Session session) {
Statement stmt = new SimpleStatement("select resource_type, resource_id from ehr.resource_by_exchange_batch where batch_id = " + batchId + " LIMIT 1;");
ResultSet rs = session.execute(stmt);
if (rs.isExhausted()) {
LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId);
return null;
}
Row row = rs.one();
String resourceType = row.getString(0);
UUID resourceId = row.get(1, UUID.class);
stmt = new SimpleStatement("select service_id from ehr.resource_history where resource_type = '" + resourceType + "' and resource_id = " + resourceId + " LIMIT 1;");
rs = session.execute(stmt);
if (rs.isExhausted()) {
LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId);
return null;
}
row = rs.one();
UUID serviceId = row.get(0, UUID.class);
return serviceId;
}*/
/*private static void fixExchangeEvents() {
List<ExchangeEvent> events = new AuditRepository().getAllExchangeEvents();
for (ExchangeEvent event: events) {
if (event.getEventDesc() != null) {
continue;
}
String eventDesc = "";
int eventType = event.getEvent().intValue();
switch (eventType) {
case 1:
eventDesc = "Receive";
break;
case 2:
eventDesc = "Validate";
break;
case 3:
eventDesc = "Transform_Start";
break;
case 4:
eventDesc = "Transform_End";
break;
case 5:
eventDesc = "Send";
break;
default:
eventDesc = "??? " + eventType;
}
event.setEventDesc(eventDesc);
new AuditRepository().save(null, event);
}
}*/
//String serviceId = headers.get(HeaderKeys.SenderServiceUuid);
//}
/*private static void fixExchanges() {
AuditRepository auditRepository = new AuditRepository();
Map<UUID, Set<UUID>> existingOnes = new HashMap();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
List<Exchange> exchanges = auditRepository.getAllExchanges();
for (Exchange exchange: exchanges) {
UUID exchangeUuid = exchange.getExchangeId();
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeUuid + " and Json " + headerJson);
continue;
}
if (serviceId == null) {
LOG.warn("No service ID found for exchange " + exchange.getExchangeId());
continue;
}
UUID serviceUuid = UUID.fromString(serviceId);
Set<UUID> exchangeIdsDone = existingOnes.get(serviceUuid);
if (exchangeIdsDone == null) {
exchangeIdsDone = new HashSet<>();
List<ExchangeByService> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, Integer.MAX_VALUE);
for (ExchangeByService exchangeByService: exchangeByServices) {
exchangeIdsDone.add(exchangeByService.getExchangeId());
}
existingOnes.put(serviceUuid, exchangeIdsDone);
}
//create the exchange by service entity
if (!exchangeIdsDone.contains(exchangeUuid)) {
Date timestamp = exchange.getTimestamp();
ExchangeByService newOne = new ExchangeByService();
newOne.setExchangeId(exchangeUuid);
newOne.setServiceId(serviceUuid);
newOne.setTimestamp(timestamp);
auditRepository.save(newOne);
try {
headers.remove(HeaderKeys.BatchIdsJson);
String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(newHeaderJson);
auditRepository.save(exchange);
} catch (JsonProcessingException e) {
LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e);
}
if (!headers.containsKey(HeaderKeys.BatchIdsJson)) {
//fix the batch IDs not being in the exchange
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeUuid);
if (!batches.isEmpty()) {
List<UUID> batchUuids = batches
.stream()
.map(t -> t.getBatchId())
.collect(Collectors.toList());
try {
String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchUuids.toArray());
headers.put(HeaderKeys.BatchIdsJson, batchUuidsStr);
String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(newHeaderJson);
auditRepository.save(exchange, null);
} catch (JsonProcessingException e) {
LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e);
}
}
//}
}
}*/
/*private static UUID findSystemId(Service service, String software, String messageVersion) throws PipelineException {
List<JsonServiceInterfaceEndpoint> endpoints = null;
try {
endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
for (JsonServiceInterfaceEndpoint endpoint: endpoints) {
UUID endpointSystemId = endpoint.getSystemUuid();
String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString();
LibraryRepository libraryRepository = new LibraryRepository();
ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId);
Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId());
LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent());
System system = libraryItem.getSystem();
for (TechnicalInterface technicalInterface: system.getTechnicalInterface()) {
if (endpointInterfaceId.equals(technicalInterface.getUuid())
&& technicalInterface.getMessageFormat().equalsIgnoreCase(software)
&& technicalInterface.getMessageFormatVersion().equalsIgnoreCase(messageVersion)) {
return endpointSystemId;
}
}
}
} catch (Exception e) {
throw new PipelineException("Failed to process endpoints from service " + service.getId());
}
return null;
}
*/
/*private static void addSystemIdToExchangeHeaders() throws Exception {
        LOG.info("addSystemIdToExchangeHeaders");
AuditRepository auditRepository = new AuditRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
ServiceRepository serviceRepository = new ServiceRepository();
//OrganisationRepository organisationRepository = new OrganisationRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson);
continue;
}
if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))) {
LOG.info("Skipping exchange " + exchangeId + " as no service UUID");
continue;
}
if (!Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) {
LOG.info("Skipping exchange " + exchangeId + " as already got system UUID");
continue;
}
try {
//work out service ID
String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid);
UUID serviceId = UUID.fromString(serviceIdStr);
String software = headers.get(HeaderKeys.SourceSystem);
String version = headers.get(HeaderKeys.SystemVersion);
Service service = serviceRepository.getById(serviceId);
UUID systemUuid = findSystemId(service, software, version);
headers.put(HeaderKeys.SenderSystemUuid, systemUuid.toString());
//work out protocol IDs
try {
String newProtocolIdsJson = DetermineRelevantProtocolIds.getProtocolIdsForPublisherService(serviceIdStr);
headers.put(HeaderKeys.ProtocolIds, newProtocolIdsJson);
} catch (Exception ex) {
LOG.error("Failed to recalculate protocols for " + exchangeId + ": " + ex.getMessage());
}
//save to DB
headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers);
exchange.setHeaders(headerJson);
auditRepository.save(exchange);
} catch (Exception ex) {
LOG.error("Error with exchange " + exchangeId, ex);
}
}
        LOG.info("Finished addSystemIdToExchangeHeaders");
}*/
/*private static void populateExchangeBatchPatients() throws Exception {
LOG.info("populateExchangeBatchPatients");
AuditRepository auditRepository = new AuditRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
//ServiceRepository serviceRepository = new ServiceRepository();
//OrganisationRepository organisationRepository = new OrganisationRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID exchangeId = row.get(0, UUID.class);
org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId);
String headerJson = exchange.getHeaders();
HashMap<String, String> headers = null;
try {
headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class);
} catch (Exception e) {
LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson);
continue;
}
if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))
|| Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) {
LOG.info("Skipping exchange " + exchangeId + " because no service or system in header");
continue;
}
try {
UUID serviceId = UUID.fromString(headers.get(HeaderKeys.SenderServiceUuid));
UUID systemId = UUID.fromString(headers.get(HeaderKeys.SenderSystemUuid));
List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch exchangeBatch : exchangeBatches) {
if (exchangeBatch.getEdsPatientId() != null) {
continue;
}
UUID batchId = exchangeBatch.getBatchId();
List<ResourceByExchangeBatch> resourceWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Patient.toString());
if (resourceWrappers.isEmpty()) {
continue;
}
List<UUID> patientIds = new ArrayList<>();
for (ResourceByExchangeBatch resourceWrapper : resourceWrappers) {
UUID patientId = resourceWrapper.getResourceId();
if (resourceWrapper.getIsDeleted()) {
deleteEntirePatientRecord(patientId, serviceId, systemId, exchangeId, batchId);
}
if (!patientIds.contains(patientId)) {
patientIds.add(patientId);
}
}
if (patientIds.size() != 1) {
LOG.info("Skipping exchange " + exchangeId + " and batch " + batchId + " because found " + patientIds.size() + " patient IDs");
continue;
}
UUID patientId = patientIds.get(0);
exchangeBatch.setEdsPatientId(patientId);
exchangeBatchRepository.save(exchangeBatch);
}
} catch (Exception ex) {
LOG.error("Error with exchange " + exchangeId, ex);
}
}
LOG.info("Finished populateExchangeBatchPatients");
}
private static void deleteEntirePatientRecord(UUID patientId, UUID serviceId, UUID systemId, UUID exchangeId, UUID batchId) throws Exception {
FhirStorageService storageService = new FhirStorageService(serviceId, systemId);
ResourceRepository resourceRepository = new ResourceRepository();
List<ResourceByPatient> resourceWrappers = resourceRepository.getResourcesByPatient(serviceId, systemId, patientId);
for (ResourceByPatient resourceWrapper: resourceWrappers) {
String json = resourceWrapper.getResourceData();
Resource resource = new JsonParser().parse(json);
storageService.exchangeBatchDelete(exchangeId, batchId, resource);
}
}*/
/*private static void convertPatientSearch() {
LOG.info("Converting Patient Search");
ResourceRepository resourceRepository = new ResourceRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
LOG.info("Doing service " + service.getName());
for (UUID systemId : findSystemIds(service)) {
List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.EpisodeOfCare.toString());
for (ResourceByService resourceWrapper: resourceWrappers) {
if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
continue;
}
try {
EpisodeOfCare episodeOfCare = (EpisodeOfCare) new JsonParser().parse(resourceWrapper.getResourceData());
String patientId = ReferenceHelper.getReferenceId(episodeOfCare.getPatient());
ResourceHistory patientWrapper = resourceRepository.getCurrentVersion(ResourceType.Patient.toString(), UUID.fromString(patientId));
if (Strings.isNullOrEmpty(patientWrapper.getResourceData())) {
continue;
}
Patient patient = (Patient) new JsonParser().parse(patientWrapper.getResourceData());
PatientSearchHelper.update(serviceId, systemId, patient);
PatientSearchHelper.update(serviceId, systemId, episodeOfCare);
} catch (Exception ex) {
LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
}
}
}
}
LOG.info("Converted Patient Search");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/**
 * Extracts the system UUIDs from a service's configured endpoints.
 * <p>
 * The service's endpoint configuration is stored as a JSON array of
 * {@code JsonServiceInterfaceEndpoint}; each endpoint carries the UUID of the
 * publishing system it belongs to.
 *
 * @param service the service whose endpoint JSON should be parsed
 * @return the system UUIDs of all endpoints, in endpoint order (may contain
 *         duplicates if the JSON does; empty if the service has no endpoints)
 * @throws Exception if the endpoint JSON cannot be parsed; the original
 *         parse failure is preserved as the cause
 */
private static List<UUID> findSystemIds(Service service) throws Exception {
    List<UUID> ret = new ArrayList<>();
    try {
        List<JsonServiceInterfaceEndpoint> endpoints =
                ObjectMapperPool.getInstance().readValue(
                        service.getEndpoints(),
                        new TypeReference<List<JsonServiceInterfaceEndpoint>>() {});
        for (JsonServiceInterfaceEndpoint endpoint : endpoints) {
            ret.add(endpoint.getSystemUuid());
        }
    } catch (Exception e) {
        // chain the cause so the underlying parse error isn't lost
        throw new Exception("Failed to process endpoints from service " + service.getId(), e);
    }
    return ret;
}
/*private static void convertPatientLink() {
LOG.info("Converting Patient Link");
ResourceRepository resourceRepository = new ResourceRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
LOG.info("Doing service " + service.getName());
for (UUID systemId : findSystemIds(service)) {
List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.Patient.toString());
for (ResourceByService resourceWrapper: resourceWrappers) {
if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
continue;
}
try {
Patient patient = (Patient)new JsonParser().parse(resourceWrapper.getResourceData());
PatientLinkHelper.updatePersonId(patient);
} catch (Exception ex) {
LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
}
}
}
}
LOG.info("Converted Patient Link");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixConfidentialPatients(String sharedStoragePath, UUID justThisService) {
LOG.info("Fixing Confidential Patients using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager();
Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class);
Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class);
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
Map<String, ResourceHistory> resourcesFixed = new HashMap<>();
Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Set<UUID> batchIdsToPutInProtocolQueue = new HashSet<>();
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f);
EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId);
ResourceFiler filer = new ResourceFiler(exchangeId, serviceId, systemId, null, null, 1);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers);
ProblemPreTransformer.transform(version, parsers, filer, helper);
ObservationPreTransformer.transform(version, parsers, filer, helper);
DrugRecordPreTransformer.transform(version, parsers, filer, helper);
IssueRecordPreTransformer.transform(version, parsers, filer, helper);
DiaryPreTransformer.transform(version, parsers, filer, helper);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient)parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getIsConfidential()
&& !patientParser.getDeleted()) {
PatientTransformer.createResource(patientParser, filer, helper, version);
}
}
patientParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class);
while (consultationParser.nextRecord()) {
if (consultationParser.getIsConfidential()
&& !consultationParser.getDeleted()) {
ConsultationTransformer.createResource(consultationParser, filer, helper, version);
}
}
consultationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
if (observationParser.getIsConfidential()
&& !observationParser.getDeleted()) {
ObservationTransformer.createResource(observationParser, filer, helper, version);
}
}
observationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class);
while (diaryParser.nextRecord()) {
if (diaryParser.getIsConfidential()
&& !diaryParser.getDeleted()) {
DiaryTransformer.createResource(diaryParser, filer, helper, version);
}
}
diaryParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class);
while (drugRecordParser.nextRecord()) {
if (drugRecordParser.getIsConfidential()
&& !drugRecordParser.getDeleted()) {
DrugRecordTransformer.createResource(drugRecordParser, filer, helper, version);
}
}
drugRecordParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class);
while (issueRecordParser.nextRecord()) {
if (issueRecordParser.getIsConfidential()
&& !issueRecordParser.getDeleted()) {
IssueRecordTransformer.createResource(issueRecordParser, filer, helper, version);
}
}
issueRecordParser.close();
filer.waitToFinish(); //just to close the thread pool, even though it's not been used
List<Resource> resources = filer.getNewResources();
for (Resource resource: resources) {
String patientId = IdHelper.getPatientId(resource);
UUID edsPatientId = UUID.fromString(patientId);
ResourceType resourceType = resource.getResourceType();
UUID resourceId = UUID.fromString(resource.getId());
boolean foundResourceInDbBatch = false;
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds != null) {
for (UUID batchId : batchIds) {
List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), resourceId);
if (resourceByExchangeBatches.isEmpty()) {
//if we've deleted data, this will be null
continue;
}
foundResourceInDbBatch = true;
for (ResourceByExchangeBatch resourceByExchangeBatch : resourceByExchangeBatches) {
String json = resourceByExchangeBatch.getResourceData();
if (!Strings.isNullOrEmpty(json)) {
LOG.warn("JSON already in resource " + resourceType + " " + resourceId);
} else {
json = parserPool.composeString(resource);
resourceByExchangeBatch.setResourceData(json);
resourceByExchangeBatch.setIsDeleted(false);
resourceByExchangeBatch.setSchemaVersion("0.1");
LOG.info("Saved resource by batch " + resourceType + " " + resourceId + " in batch " + batchId);
UUID versionUuid = resourceByExchangeBatch.getVersion();
ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(resourceId, resourceType.toString(), versionUuid);
if (resourceHistory == null) {
throw new Exception("Failed to find resource history for " + resourceType + " " + resourceId + " and version " + versionUuid);
}
resourceHistory.setIsDeleted(false);
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
resourceHistory.setSchemaVersion("0.1");
resourceRepository.save(resourceByExchangeBatch);
resourceRepository.save(resourceHistory);
batchIdsToPutInProtocolQueue.add(batchId);
String key = resourceType.toString() + ":" + resourceId;
resourcesFixed.put(key, resourceHistory);
}
//if a patient became confidential, we will have deleted all resources for that
//patient, so we need to undo that too
//to undelete WHOLE patient record
//1. if THIS resource is a patient
//2. get all other deletes from the same exchange batch
//3. delete those from resource_by_exchange_batch (the deleted ones only)
//4. delete same ones from resource_history
//5. retrieve most recent resource_history
//6. if not deleted, add to resources fixed
if (resourceType == ResourceType.Patient) {
List<ResourceByExchangeBatch> resourcesInSameBatch = resourceRepository.getResourcesForBatch(batchId);
LOG.info("Undeleting " + resourcesInSameBatch.size() + " resources for batch " + batchId);
for (ResourceByExchangeBatch resourceInSameBatch: resourcesInSameBatch) {
if (!resourceInSameBatch.getIsDeleted()) {
continue;
}
//patient and episode resources will be restored by the above stuff, so don't try
//to do it again
if (resourceInSameBatch.getResourceType().equals(ResourceType.Patient.toString())
|| resourceInSameBatch.getResourceType().equals(ResourceType.EpisodeOfCare.toString())) {
continue;
}
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(resourceInSameBatch.getResourceId(), resourceInSameBatch.getResourceType(), resourceInSameBatch.getVersion());
mapperResourceByExchangeBatch.delete(resourceInSameBatch);
mapperResourceHistory.delete(deletedResourceHistory);
batchIdsToPutInProtocolQueue.add(batchId);
//check the most recent version of our resource, and if it's not deleted, add to the list to update the resource_by_service table
ResourceHistory mostRecentDeletedResourceHistory = resourceRepository.getCurrentVersion(resourceInSameBatch.getResourceType(), resourceInSameBatch.getResourceId());
if (mostRecentDeletedResourceHistory != null
&& !mostRecentDeletedResourceHistory.getIsDeleted()) {
String key2 = mostRecentDeletedResourceHistory.getResourceType().toString() + ":" + mostRecentDeletedResourceHistory.getResourceId();
resourcesFixed.put(key2, mostRecentDeletedResourceHistory);
}
}
}
}
}
}
//if we didn't find records in the DB to update, then
if (!foundResourceInDbBatch) {
//we can't generate a back-dated time UUID, but we need one so the resource_history
//table is in order. To get a suitable time UUID, we just pull out the first exchange batch for our exchange,
//and the batch ID is actually a time UUID that was allocated around the right time
ExchangeBatch firstBatch = exchangeBatchRepository.retrieveFirstForExchangeId(exchangeId);
//if there was no batch for the exchange, then the exchange wasn't processed at all. So skip this exchange
//and we'll pick up the same patient data in a following exchange
if (firstBatch == null) {
continue;
}
UUID versionUuid = firstBatch.getBatchId();
//find suitable batch ID
UUID batchId = null;
if (batchIds != null
&& batchIds.size() > 0) {
batchId = batchIds.get(batchIds.size()-1);
} else {
//create new batch ID if not found
ExchangeBatch exchangeBatch = new ExchangeBatch();
exchangeBatch.setBatchId(UUIDs.timeBased());
exchangeBatch.setExchangeId(exchangeId);
exchangeBatch.setInsertedAt(new Date());
exchangeBatch.setEdsPatientId(edsPatientId);
exchangeBatchRepository.save(exchangeBatch);
batchId = exchangeBatch.getBatchId();
//add to map for next resource
if (batchIds == null) {
batchIds = new ArrayList<>();
}
batchIds.add(batchId);
batchesPerPatient.put(edsPatientId, batchIds);
}
String json = parserPool.composeString(resource);
ResourceHistory resourceHistory = new ResourceHistory();
resourceHistory.setResourceId(resourceId);
resourceHistory.setResourceType(resourceType.toString());
resourceHistory.setVersion(versionUuid);
resourceHistory.setCreatedAt(new Date());
resourceHistory.setServiceId(serviceId);
resourceHistory.setSystemId(systemId);
resourceHistory.setIsDeleted(false);
resourceHistory.setSchemaVersion("0.1");
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
ResourceByExchangeBatch resourceByExchangeBatch = new ResourceByExchangeBatch();
resourceByExchangeBatch.setBatchId(batchId);
resourceByExchangeBatch.setExchangeId(exchangeId);
resourceByExchangeBatch.setResourceType(resourceType.toString());
resourceByExchangeBatch.setResourceId(resourceId);
resourceByExchangeBatch.setVersion(versionUuid);
resourceByExchangeBatch.setIsDeleted(false);
resourceByExchangeBatch.setSchemaVersion("0.1");
resourceByExchangeBatch.setResourceData(json);
resourceRepository.save(resourceHistory);
resourceRepository.save(resourceByExchangeBatch);
batchIdsToPutInProtocolQueue.add(batchId);
}
}
if (!batchIdsToPutInProtocolQueue.isEmpty()) {
exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchIdsToPutInProtocolQueue);
}
}
//update the resource_by_service table (and the resource_by_patient view)
for (ResourceHistory resourceHistory: resourcesFixed.values()) {
UUID latestVersionUpdatedUuid = resourceHistory.getVersion();
ResourceHistory latestVersion = resourceRepository.getCurrentVersion(resourceHistory.getResourceType(), resourceHistory.getResourceId());
UUID latestVersionUuid = latestVersion.getVersion();
//if there have been subsequent updates to the resource, then skip it
if (!latestVersionUuid.equals(latestVersionUpdatedUuid)) {
continue;
}
Resource resource = parserPool.parse(resourceHistory.getResourceData());
ResourceMetadata metadata = MetadataFactory.createMetadata(resource);
UUID patientId = ((PatientCompartment)metadata).getPatientId();
ResourceByService resourceByService = new ResourceByService();
resourceByService.setServiceId(resourceHistory.getServiceId());
resourceByService.setSystemId(resourceHistory.getSystemId());
resourceByService.setResourceType(resourceHistory.getResourceType());
resourceByService.setResourceId(resourceHistory.getResourceId());
resourceByService.setCurrentVersion(resourceHistory.getVersion());
resourceByService.setUpdatedAt(resourceHistory.getCreatedAt());
resourceByService.setPatientId(patientId);
resourceByService.setSchemaVersion(resourceHistory.getSchemaVersion());
resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata));
resourceByService.setResourceData(resourceHistory.getResourceData());
resourceRepository.save(resourceByService);
//call out to our patient search and person matching services
if (resource instanceof Patient) {
PatientLinkHelper.updatePersonId((Patient)resource);
PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (Patient)resource);
} else if (resource instanceof EpisodeOfCare) {
PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (EpisodeOfCare)resource);
}
}
if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) {
//find the config for our protocol queue
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) {
Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
}
}
LOG.info("Finished Fixing Confidential Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixDeletedAppointments(String sharedStoragePath, boolean saveChanges, UUID justThisService) {
LOG.info("Fixing Deleted Appointments using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager();
Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class);
Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class);
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class, dir, version, true, parsers);
//find any deleted patients
List<UUID> deletedPatientUuids = new ArrayList<>();
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getDeleted()) {
//find the EDS patient ID for this local guid
String patientGuid = patientParser.getPatientGuid();
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid);
}
deletedPatientUuids.add(edsPatientId);
}
}
patientParser.close();
//go through the appts file to find properly deleted appt GUIDS
List<UUID> deletedApptUuids = new ArrayList<>();
org.endeavourhealth.transform.emis.csv.schema.appointment.Slot apptParser = (org.endeavourhealth.transform.emis.csv.schema.appointment.Slot) parsers.get(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class);
while (apptParser.nextRecord()) {
if (apptParser.getDeleted()) {
String patientGuid = apptParser.getPatientGuid();
String slotGuid = apptParser.getSlotGuid();
if (!Strings.isNullOrEmpty(patientGuid)) {
String uniqueLocalId = EmisCsvHelper.createUniqueId(patientGuid, slotGuid);
UUID edsApptId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Appointment, uniqueLocalId);
deletedApptUuids.add(edsApptId);
}
}
}
apptParser.close();
for (UUID edsPatientId : deletedPatientUuids) {
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds == null) {
//if there are no batches for this patient, we'll be handling this data in another exchange
continue;
}
for (UUID batchId : batchIds) {
List<ResourceByExchangeBatch> apptWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Appointment.toString());
for (ResourceByExchangeBatch apptWrapper : apptWrappers) {
//ignore non-deleted appts
if (!apptWrapper.getIsDeleted()) {
continue;
}
//if the appt was deleted legitamately, then skip it
UUID apptId = apptWrapper.getResourceId();
if (deletedApptUuids.contains(apptId)) {
continue;
}
ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(apptWrapper.getResourceId(), apptWrapper.getResourceType(), apptWrapper.getVersion());
if (saveChanges) {
mapperResourceByExchangeBatch.delete(apptWrapper);
mapperResourceHistory.delete(deletedResourceHistory);
}
LOG.info("Un-deleted " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " in batch " + batchId + " patient " + edsPatientId);
//now get the most recent instance of the appointment, and if it's NOT deleted, insert into the resource_by_service table
ResourceHistory mostRecentResourceHistory = resourceRepository.getCurrentVersion(apptWrapper.getResourceType(), apptWrapper.getResourceId());
if (mostRecentResourceHistory != null
&& !mostRecentResourceHistory.getIsDeleted()) {
Resource resource = parserPool.parse(mostRecentResourceHistory.getResourceData());
ResourceMetadata metadata = MetadataFactory.createMetadata(resource);
UUID patientId = ((PatientCompartment) metadata).getPatientId();
ResourceByService resourceByService = new ResourceByService();
resourceByService.setServiceId(mostRecentResourceHistory.getServiceId());
resourceByService.setSystemId(mostRecentResourceHistory.getSystemId());
resourceByService.setResourceType(mostRecentResourceHistory.getResourceType());
resourceByService.setResourceId(mostRecentResourceHistory.getResourceId());
resourceByService.setCurrentVersion(mostRecentResourceHistory.getVersion());
resourceByService.setUpdatedAt(mostRecentResourceHistory.getCreatedAt());
resourceByService.setPatientId(patientId);
resourceByService.setSchemaVersion(mostRecentResourceHistory.getSchemaVersion());
resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata));
resourceByService.setResourceData(mostRecentResourceHistory.getResourceData());
if (saveChanges) {
resourceRepository.save(resourceByService);
}
LOG.info("Restored " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " to resource_by_service table");
}
}
}
}
}
}
LOG.info("Finished Deleted Appointments Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/**
 * Runs the slot-reference fix for every service whose publisher config name
 * matches the given publisher. Any exception is logged and swallowed.
 */
private static void fixSlotReferencesForPublisher(String publisher) {
    try {
        ServiceDalI serviceDal = DalProvider.factoryServiceDal();
        for (Service candidate : serviceDal.getAll()) {
            String configName = candidate.getPublisherConfigName();
            if (configName != null
                    && configName.equals(publisher)) {
                fixSlotReferences(candidate.getId());
            }
        }
    } catch (Exception ex) {
        LOG.error("", ex);
    }
}
/**
 * Repairs Appointment resources for the given service whose slot reference does not
 * map back to a valid locally-unique source ID. For each non-deleted version of each
 * appointment, if the slot reference is invalid (its source ID contains no ":"), a
 * replacement slot reference is derived from the appointment's own source ID and the
 * resource is re-saved over the same history record. Exceptions are logged and swallowed.
 */
private static void fixSlotReferences(UUID serviceId) {
    LOG.info("Fixing Slot References in Appointments for " + serviceId);
    try {
        //get patient IDs for the service from the resource_id_map via a raw JDBC query
        List<UUID> patientIds = new ArrayList<>();
        EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId);
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection connection = session.connection();
        //serviceId is a UUID so this concatenation cannot be injected into; the statement and
        //result set are in try-with-resources so they are released even if reading fails
        String sql = "SELECT eds_id FROM resource_id_map WHERE service_id = '" + serviceId + "' AND resource_type = '" + ResourceType.Patient + "';";
        try (Statement statement = connection.createStatement();
             ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                String patientUuid = rs.getString(1);
                patientIds.add(UUID.fromString(patientUuid));
            }
        }
        connection.close();
        //NOTE(review): the EntityManager is never closed here (same as the original code) —
        //confirm whether ConnectionManager expects callers to close it
        LOG.debug("Found " + patientIds.size() + " patients");
        int done = 0;
        int fixed = 0;
        ResourceDalI resourceDal = DalProvider.factoryResourceDal();
        EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, null, null, null, true, null);
        //for each patient
        for (UUID patientUuid: patientIds) {
            //get all appointment resources for the patient
            List<ResourceWrapper> appointmentWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.Appointment.toString());
            for (ResourceWrapper apptWrapper: appointmentWrappers) {
                List<ResourceWrapper> historyWrappers = resourceDal.getResourceHistory(serviceId, apptWrapper.getResourceType(), apptWrapper.getResourceId());
                //the above returns most recent first, but we want to do them in order
                historyWrappers = Lists.reverse(historyWrappers);
                for (ResourceWrapper historyWrapper : historyWrappers) {
                    //deleted versions carry no resource data to fix
                    if (historyWrapper.isDeleted()) {
                        continue;
                    }
                    String json = historyWrapper.getResourceData();
                    Appointment appt = (Appointment) FhirSerializationHelper.deserializeResource(json);
                    if (!appt.hasSlot()) {
                        continue;
                    }
                    if (appt.getSlot().size() != 1) {
                        throw new Exception("Appointment " + appt.getId() + " has " + appt.getSlot().size() + " slot refs");
                    }
                    Reference slotRef = appt.getSlot().get(0);
                    //test if the slot reference maps back to a valid source ID — valid ones contain a ":"
                    Reference slotLocalRef = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, slotRef);
                    String slotSourceId = ReferenceHelper.getReferenceId(slotLocalRef);
                    if (slotSourceId.contains(":")) {
                        continue;
                    }
                    //if not, derive the correct slot reference from the appointment's own source ID
                    Reference apptEdsReference = ReferenceHelper.createReference(appt.getResourceType(), appt.getId());
                    Reference apptLocalReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, apptEdsReference);
                    String sourceId = ReferenceHelper.getReferenceId(apptLocalReference);
                    Reference slotLocalReference = ReferenceHelper.createReference(ResourceType.Slot, sourceId);
                    Reference slotEdsReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(slotLocalReference, csvHelper);
                    String slotEdsReferenceValue = slotEdsReference.getReference();
                    slotRef.setReference(slotEdsReferenceValue);
                    //save the corrected appointment back over the same history record
                    json = FhirSerializationHelper.serializeResource(appt);
                    historyWrapper.setResourceData(json);
                    saveResourceWrapper(serviceId, historyWrapper);
                    fixed++;
                }
            }
            done ++;
            if (done % 1000 == 0) {
                LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts");
            }
        }
        LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts");
        LOG.info("Finished Fixing Slot References in Appointments for " + serviceId);
    } catch (Exception ex) {
        LOG.error("", ex);
    }
}
/*private static void fixReviews(String sharedStoragePath, UUID justThisService) {
LOG.info("Fixing Reviews using path " + sharedStoragePath + " and service " + justThisService);
ResourceRepository resourceRepository = new ResourceRepository();
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ParserPool parserPool = new ParserPool();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
Map<String, Long> problemCodes = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
LOG.info("Doing Emis CSV exchange " + exchangeId + " with " + batches.size() + " batches");
for (ExchangeBatch batch: batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem problemParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class);
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (problemParser.nextRecord()) {
String patientGuid = problemParser.getPatientGuid();
String observationGuid = problemParser.getObservationGuid();
String key = patientGuid + ":" + observationGuid;
if (!problemCodes.containsKey(key)) {
problemCodes.put(key, null);
}
}
problemParser.close();
while (observationParser.nextRecord()) {
String patientGuid = observationParser.getPatientGuid();
String observationGuid = observationParser.getObservationGuid();
String key = patientGuid + ":" + observationGuid;
if (problemCodes.containsKey(key)) {
Long codeId = observationParser.getCodeId();
if (codeId == null) {
continue;
}
problemCodes.put(key, codeId);
}
}
observationParser.close();
LOG.info("Found " + problemCodes.size() + " problem codes so far");
String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f);
EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId);
while (observationParser.nextRecord()) {
String problemGuid = observationParser.getProblemGuid();
if (!Strings.isNullOrEmpty(problemGuid)) {
String patientGuid = observationParser.getPatientGuid();
Long codeId = observationParser.getCodeId();
if (codeId == null) {
continue;
}
String key = patientGuid + ":" + problemGuid;
Long problemCodeId = problemCodes.get(key);
if (problemCodeId == null
|| problemCodeId.longValue() != codeId.longValue()) {
continue;
}
//if here, our code is the same as the problem, so it's a review
String locallyUniqueId = patientGuid + ":" + observationParser.getObservationGuid();
ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, helper);
for (UUID systemId: systemIds) {
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid);
}
UUID edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId);
if (edsObservationId == null) {
//try observations as diagnostic reports, because it could be one of those instead
if (resourceType == ResourceType.Observation) {
resourceType = ResourceType.DiagnosticReport;
edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId);
}
if (edsObservationId == null) {
throw new Exception("Failed to find observation ID for service " + serviceId + " system " + systemId + " resourceType " + resourceType + " local ID " + locallyUniqueId);
}
}
List<UUID> batchIds = batchesPerPatient.get(edsPatientId);
if (batchIds == null) {
//if there are no batches for this patient, we'll be handling this data in another exchange
continue;
//throw new Exception("Failed to find batch ID for patient " + edsPatientId + " in exchange " + exchangeId + " for resource " + resourceType + " " + edsObservationId);
}
for (UUID batchId: batchIds) {
List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), edsObservationId);
if (resourceByExchangeBatches.isEmpty()) {
//if we've deleted data, this will be null
continue;
//throw new Exception("No resources found for batch " + batchId + " resource type " + resourceType + " and resource id " + edsObservationId);
}
for (ResourceByExchangeBatch resourceByExchangeBatch: resourceByExchangeBatches) {
String json = resourceByExchangeBatch.getResourceData();
if (Strings.isNullOrEmpty(json)) {
throw new Exception("No JSON in resource " + resourceType + " " + edsObservationId + " in batch " + batchId);
}
Resource resource = parserPool.parse(json);
if (addReviewExtension((DomainResource)resource)) {
json = parserPool.composeString(resource);
resourceByExchangeBatch.setResourceData(json);
LOG.info("Changed " + resourceType + " " + edsObservationId + " to have extension in batch " + batchId);
resourceRepository.save(resourceByExchangeBatch);
UUID versionUuid = resourceByExchangeBatch.getVersion();
ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(edsObservationId, resourceType.toString(), versionUuid);
if (resourceHistory == null) {
throw new Exception("Failed to find resource history for " + resourceType + " " + edsObservationId + " and version " + versionUuid);
}
resourceHistory.setResourceData(json);
resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json));
resourceRepository.save(resourceHistory);
ResourceByService resourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType.toString(), edsObservationId);
if (resourceByService != null) {
UUID serviceVersionUuid = resourceByService.getCurrentVersion();
if (serviceVersionUuid.equals(versionUuid)) {
resourceByService.setResourceData(json);
resourceRepository.save(resourceByService);
}
}
} else {
LOG.info("" + resourceType + " " + edsObservationId + " already has extension");
}
}
}
}
//1. find out resource type originall saved from
//2. retrieve from resource_by_exchange_batch
//3. update resource in resource_by_exchange_batch
//4. retrieve from resource_history
//5. update resource_history
//6. retrieve record from resource_by_service
//7. if resource_by_service version UUID matches the resource_history updated, then update that too
}
}
observationParser.close();
}
}
LOG.info("Finished Fixing Reviews");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static boolean addReviewExtension(DomainResource resource) {
if (ExtensionConverter.hasExtension(resource, FhirExtensionUri.IS_REVIEW)) {
return false;
}
Extension extension = ExtensionConverter.createExtension(FhirExtensionUri.IS_REVIEW, new BooleanType(true));
resource.addExtension(extension);
return true;
}*/
/*private static void runProtocolsForConfidentialPatients(String sharedStoragePath, UUID justThisService) {
LOG.info("Running Protocols for Confidential Patients using path " + sharedStoragePath + " and service " + justThisService);
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
try {
Iterable<Service> iterable = new ServiceRepository().getAll();
for (Service service : iterable) {
UUID serviceId = service.getId();
if (justThisService != null
&& !service.getId().equals(justThisService)) {
LOG.info("Skipping service " + service.getName());
continue;
}
//once we match the servce, set this to null to do all other services
justThisService = null;
LOG.info("Doing service " + service.getName());
List<UUID> systemIds = findSystemIds(service);
List<String> interestingPatientGuids = new ArrayList<>();
Map<UUID, Map<UUID, List<UUID>>> batchesPerPatientPerExchange = new HashMap<>();
List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId);
for (UUID exchangeId: exchangeIds) {
Exchange exchange = AuditWriter.readExchange(exchangeId);
String software = exchange.getHeader(HeaderKeys.SourceSystem);
if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) {
continue;
}
String body = exchange.getBody();
String[] files = body.split(java.lang.System.lineSeparator());
if (files.length == 0) {
continue;
}
LOG.info("Doing Emis CSV exchange " + exchangeId);
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>();
List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId);
for (ExchangeBatch batch : batches) {
UUID patientId = batch.getEdsPatientId();
if (patientId != null) {
List<UUID> batchIds = batchesPerPatient.get(patientId);
if (batchIds == null) {
batchIds = new ArrayList<>();
batchesPerPatient.put(patientId, batchIds);
}
batchIds.add(batch.getBatchId());
}
}
batchesPerPatientPerExchange.put(exchangeId, batchesPerPatient);
File f = new File(sharedStoragePath, files[0]);
File dir = f.getParentFile();
String version = EmisCsvToFhirTransformer.determineVersion(dir);
Map<Class, AbstractCsvParser> parsers = new HashMap<>();
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers);
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers);
org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class);
while (patientParser.nextRecord()) {
if (patientParser.getIsConfidential() || patientParser.getDeleted()) {
interestingPatientGuids.add(patientParser.getPatientGuid());
}
}
patientParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class);
while (consultationParser.nextRecord()) {
if (consultationParser.getIsConfidential()
&& !consultationParser.getDeleted()) {
interestingPatientGuids.add(consultationParser.getPatientGuid());
}
}
consultationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class);
while (observationParser.nextRecord()) {
if (observationParser.getIsConfidential()
&& !observationParser.getDeleted()) {
interestingPatientGuids.add(observationParser.getPatientGuid());
}
}
observationParser.close();
org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class);
while (diaryParser.nextRecord()) {
if (diaryParser.getIsConfidential()
&& !diaryParser.getDeleted()) {
interestingPatientGuids.add(diaryParser.getPatientGuid());
}
}
diaryParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class);
while (drugRecordParser.nextRecord()) {
if (drugRecordParser.getIsConfidential()
&& !drugRecordParser.getDeleted()) {
interestingPatientGuids.add(drugRecordParser.getPatientGuid());
}
}
drugRecordParser.close();
org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class);
while (issueRecordParser.nextRecord()) {
if (issueRecordParser.getIsConfidential()
&& !issueRecordParser.getDeleted()) {
interestingPatientGuids.add(issueRecordParser.getPatientGuid());
}
}
issueRecordParser.close();
}
Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>();
for (String interestingPatientGuid: interestingPatientGuids) {
if (systemIds.size() > 1) {
throw new Exception("Multiple system IDs for service " + serviceId);
}
UUID systemId = systemIds.get(0);
UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, interestingPatientGuid);
if (edsPatientId == null) {
throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + interestingPatientGuid);
}
for (UUID exchangeId: batchesPerPatientPerExchange.keySet()) {
Map<UUID, List<UUID>> batchesPerPatient = batchesPerPatientPerExchange.get(exchangeId);
List<UUID> batches = batchesPerPatient.get(edsPatientId);
if (batches != null) {
Set<UUID> batchesForExchange = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
if (batchesForExchange == null) {
batchesForExchange = new HashSet<>();
exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchesForExchange);
}
batchesForExchange.addAll(batches);
}
}
}
if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) {
//find the config for our protocol queue
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) {
Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
}
}
LOG.info("Finished Running Protocols for Confidential Patients");
} catch (Exception ex) {
LOG.error("", ex);
}
}*/
/*private static void fixOrgs() {
LOG.info("Posting orgs to protocol queue");
String[] orgIds = new String[]{
"332f31a2-7b28-47cb-af6f-18f65440d43d",
"c893d66b-eb89-4657-9f53-94c5867e7ed9"};
ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository();
ResourceRepository resourceRepository = new ResourceRepository();
Map<UUID, Set<UUID>> exchangeBatches = new HashMap<>();
for (String orgId: orgIds) {
LOG.info("Doing org ID " + orgId);
UUID orgUuid = UUID.fromString(orgId);
try {
//select batch_id from ehr.resource_by_exchange_batch where resource_type = 'Organization' and resource_id = 8f465517-729b-4ad9-b405-92b487047f19 LIMIT 1 ALLOW FILTERING;
ResourceByExchangeBatch resourceByExchangeBatch = resourceRepository.getFirstResourceByExchangeBatch(ResourceType.Organization.toString(), orgUuid);
UUID batchId = resourceByExchangeBatch.getBatchId();
//select exchange_id from ehr.exchange_batch where batch_id = 1a940e10-1535-11e7-a29d-a90b99186399 LIMIT 1 ALLOW FILTERING;
ExchangeBatch exchangeBatch = exchangeBatchRepository.retrieveFirstForBatchId(batchId);
UUID exchangeId = exchangeBatch.getExchangeId();
Set<UUID> list = exchangeBatches.get(exchangeId);
if (list == null) {
list = new HashSet<>();
exchangeBatches.put(exchangeId, list);
}
list.add(batchId);
} catch (Exception ex) {
LOG.error("", ex);
break;
}
}
try {
//find the config for our protocol queue (which is in the inbound config)
String configXml = ConfigManager.getConfiguration("inbound", "queuereader");
//the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary
QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml);
Pipeline pipeline = configuration.getPipeline();
PostMessageToExchangeConfig config = pipeline
.getPipelineComponents()
.stream()
.filter(t -> t instanceof PostMessageToExchangeConfig)
.map(t -> (PostMessageToExchangeConfig) t)
.filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol"))
.collect(StreamExtension.singleOrNullCollector());
//post to the protocol exchange
for (UUID exchangeId : exchangeBatches.keySet()) {
Set<UUID> batchIds = exchangeBatches.get(exchangeId);
org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId);
String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds);
exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);
LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString);
PostMessageToExchange component = new PostMessageToExchange(config);
component.process(exchange);
}
} catch (Exception ex) {
LOG.error("", ex);
return;
}
LOG.info("Finished posting orgs to protocol queue");
}*/
/*private static void findCodes() {
LOG.info("Finding missing codes");
AuditRepository auditRepository = new AuditRepository();
ServiceRepository serviceRepository = new ServiceRepository();
Session session = CassandraConnector.getInstance().getSession();
Statement stmt = new SimpleStatement("SELECT service_id, system_id, exchange_id, version FROM audit.exchange_transform_audit ALLOW FILTERING;");
stmt.setFetchSize(100);
ResultSet rs = session.execute(stmt);
while (!rs.isExhausted()) {
Row row = rs.one();
UUID serviceId = row.get(0, UUID.class);
UUID systemId = row.get(1, UUID.class);
UUID exchangeId = row.get(2, UUID.class);
UUID version = row.get(3, UUID.class);
ExchangeTransformAudit audit = auditRepository.getExchangeTransformAudit(serviceId, systemId, exchangeId, version);
String xml = audit.getErrorXml();
if (xml == null) {
continue;
}
String codePrefix = "Failed to find clinical code CodeableConcept for codeId ";
int codeIndex = xml.indexOf(codePrefix);
if (codeIndex > -1) {
int startIndex = codeIndex + codePrefix.length();
int tagEndIndex = xml.indexOf("<", startIndex);
String code = xml.substring(startIndex, tagEndIndex);
Service service = serviceRepository.getById(serviceId);
String name = service.getName();
LOG.info(name + " clinical code " + code + " from " + audit.getStarted());
continue;
}
codePrefix = "Failed to find medication CodeableConcept for codeId ";
codeIndex = xml.indexOf(codePrefix);
if (codeIndex > -1) {
int startIndex = codeIndex + codePrefix.length();
int tagEndIndex = xml.indexOf("<", startIndex);
String code = xml.substring(startIndex, tagEndIndex);
Service service = serviceRepository.getById(serviceId);
String name = service.getName();
LOG.info(name + " drug code " + code + " from " + audit.getStarted());
continue;
}
}
LOG.info("Finished finding missing codes");
}*/
private static void createEmisSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Emis Subset");
try {
Set<String> patientGuids = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
patientGuids.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createEmisSubsetForFile(sourceDir, destDir, patientGuids);
LOG.info("Finished Creating Emis Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively copies sourceDir to destDir, filtering any CSV file that has a
 * "PatientGuid" column down to rows whose patient GUID is in patientGuids (rows with
 * an empty patient GUID are always kept). Non-CSV files are skipped; CSV files with
 * no patient column are copied unchanged.
 *
 * @throws Exception if a file cannot be read, parsed or written
 */
private static void createEmisSubsetForFile(File sourceDir, File destDir, Set<String> patientGuids) throws Exception {
    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if the path isn't a directory or can't be read
        LOG.info("No files found in " + sourceDir);
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);
    for (File sourceFile : files) {
        String name = sourceFile.getName();
        File destFile = new File(destDir, name);
        if (sourceFile.isDirectory()) {
            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createEmisSubsetForFile(sourceFile, destFile, patientGuids);
        } else {
            if (destFile.exists()) {
                destFile.delete();
            }
            LOG.info("Checking file " + sourceFile);
            //skip any non-CSV file
            String ext = FilenameUtils.getExtension(name);
            if (!ext.equalsIgnoreCase("csv")) {
                LOG.info("Skipping as not a CSV file");
                continue;
            }
            CSVFormat format = CSVFormat.DEFAULT.withHeader();
            //try-with-resources so the parser (and underlying streams) are closed even on error
            try (CSVParser parser = new CSVParser(
                    new InputStreamReader(
                            new BufferedInputStream(
                                    new FileInputStream(sourceFile))), format)) {
                Map<String, Integer> headerMap = parser.getHeaderMap();
                if (!headerMap.containsKey("PatientGuid")) {
                    //if no patient column, just copy the file
                    LOG.info("Copying non-patient file " + sourceFile);
                    copyFile(sourceFile, destFile);
                    continue;
                }
                String filterColumn = "PatientGuid";
                //rebuild the header array in column order for the output file
                String[] columnHeaders = new String[headerMap.size()];
                for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                    columnHeaders[entry.getValue()] = entry.getKey();
                }
                try (CSVPrinter printer = new CSVPrinter(
                        new BufferedWriter(
                                new OutputStreamWriter(
                                        new FileOutputStream(destFile))), format.withHeader(columnHeaders))) {
                    for (CSVRecord csvRecord : parser) {
                        String patientGuid = csvRecord.get(filterColumn);
                        if (Strings.isNullOrEmpty(patientGuid) //if empty, carry over this record
                                || patientGuids.contains(patientGuid)) {
                            printer.printRecord(csvRecord);
                            printer.flush();
                        }
                    }
                }
            }
        }
    }
}
private static void createTppSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating TPP Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createTppSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating TPP Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively copies sourceDir to destDir, filtering CSV files down to rows whose
 * person ID is in personIds. The filter column is "IDPatient" where present, or
 * "RowIdentifier" for SRPatient.csv; files with neither are copied unchanged.
 * Files are read and written as CP1252, quoting all output values.
 *
 * @throws Exception if a file cannot be read, parsed or written
 */
private static void createTppSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if the path isn't a directory or can't be read
        LOG.info("No files found in " + sourceDir);
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);
    for (File sourceFile : files) {
        String name = sourceFile.getName();
        File destFile = new File(destDir, name);
        if (sourceFile.isDirectory()) {
            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createTppSubsetForFile(sourceFile, destFile, personIds);
        } else {
            if (destFile.exists()) {
                destFile.delete();
            }
            LOG.info("Checking file " + sourceFile);
            //skip any non-CSV file
            String ext = FilenameUtils.getExtension(name);
            if (!ext.equalsIgnoreCase("csv")) {
                LOG.info("Skipping as not a CSV file");
                continue;
            }
            Charset encoding = Charset.forName("CP1252");
            CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader();
            //try-with-resources so the parser (and underlying streams) are closed even on error
            try (CSVParser parser = new CSVParser(
                    new InputStreamReader(
                            new BufferedInputStream(
                                    new FileInputStream(sourceFile)), encoding), format)) {
                String filterColumn;
                Map<String, Integer> headerMap = parser.getHeaderMap();
                if (headerMap.containsKey("IDPatient")) {
                    filterColumn = "IDPatient";
                } else if (name.equalsIgnoreCase("SRPatient.csv")) {
                    filterColumn = "RowIdentifier";
                } else {
                    //if no patient column, just copy the file
                    LOG.info("Copying non-patient file " + sourceFile);
                    copyFile(sourceFile, destFile);
                    continue;
                }
                //rebuild the header array in column order for the output file
                String[] columnHeaders = new String[headerMap.size()];
                for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                    columnHeaders[entry.getValue()] = entry.getKey();
                }
                try (CSVPrinter printer = new CSVPrinter(
                        new BufferedWriter(
                                new OutputStreamWriter(
                                        new FileOutputStream(destFile), encoding)), format.withHeader(columnHeaders))) {
                    for (CSVRecord csvRecord : parser) {
                        String patientId = csvRecord.get(filterColumn);
                        if (personIds.contains(patientId)) {
                            printer.printRecord(csvRecord);
                            printer.flush();
                        }
                    }
                }
            }
        }
    }
}
private static void createVisionSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Vision Subset");
try {
Set<String> personIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
personIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createVisionSubsetForFile(sourceDir, destDir, personIds);
LOG.info("Finished Creating Vision Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively copies the Vision extract in sourceDir to destDir, keeping only CSV rows
 * whose patient ID (column 0 of the known patient files) is in personIds. Non-CSV files
 * and files without a patient column are copied over verbatim.
 *
 * @param sourceDir directory of the full extract (recursed into)
 * @param destDir   mirror directory for the subset
 * @param personIds patient IDs to retain
 */
private static void createVisionSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if sourceDir isn't a readable directory
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);
    for (File sourceFile : files) {
        String name = sourceFile.getName();
        File destFile = new File(destDir, name);
        if (sourceFile.isDirectory()) {
            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createVisionSubsetForFile(sourceFile, destFile, personIds);
            continue;
        }
        if (destFile.exists()) {
            destFile.delete();
        }
        LOG.info("Checking file " + sourceFile);
        //skip any non-CSV file
        String ext = FilenameUtils.getExtension(name);
        if (!ext.equalsIgnoreCase("csv")) {
            LOG.info("Skipping as not a CSV file");
            continue;
        }
        //NOTE(review): FileReader uses the platform default charset — kept to preserve behaviour
        CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL);
        //FIX: try-with-resources so the parser and printer are always closed on error
        try (CSVParser parser = new CSVParser(new BufferedReader(new FileReader(sourceFile)), format)) {
            int filterColumn;
            //patient ID is in column 0 of these file types
            if (name.contains("encounter_data") || name.contains("journal_data")
                    || name.contains("patient_data") || name.contains("referral_data")) {
                filterColumn = 0;
            } else {
                //if no patient column, just copy the file
                LOG.info("Copying non-patient file " + sourceFile);
                copyFile(sourceFile, destFile);
                continue;
            }
            try (CSVPrinter printer = new CSVPrinter(new BufferedWriter(new PrintWriter(destFile)), format)) {
                for (CSVRecord csvRecord : parser) {
                    String patientId = csvRecord.get(filterColumn);
                    if (personIds.contains(patientId)) {
                        printer.printRecord(csvRecord);
                        printer.flush();
                    }
                }
            }
        }
    }
}
private static void createHomertonSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Homerton Subset");
try {
Set<String> PersonIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
PersonIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createHomertonSubsetForFile(sourceDir, destDir, PersonIds);
LOG.info("Finished Creating Homerton Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively copies the Homerton extract in sourceDir to destDir, keeping only CSV
 * rows whose PersonId is in personIds. The PersonId column index varies per file type;
 * files without a known PersonId column are copied verbatim.
 *
 * @param sourceDir directory of the full extract (recursed into)
 * @param destDir   mirror directory for the subset
 * @param personIds person IDs to retain
 */
private static void createHomertonSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception {
    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if sourceDir isn't a readable directory
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);
    for (File sourceFile : files) {
        String name = sourceFile.getName();
        File destFile = new File(destDir, name);
        if (sourceFile.isDirectory()) {
            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createHomertonSubsetForFile(sourceFile, destFile, personIds);
            continue;
        }
        if (destFile.exists()) {
            destFile.delete();
        }
        LOG.info("Checking file " + sourceFile);
        //skip any non-CSV file
        String ext = FilenameUtils.getExtension(name);
        if (!ext.equalsIgnoreCase("csv")) {
            LOG.info("Skipping as not a CSV file");
            continue;
        }
        CSVFormat format = CSVFormat.DEFAULT.withHeader();
        //FIX: try-with-resources so the parser and printer are always closed on error
        try (CSVParser parser = new CSVParser(new BufferedReader(new FileReader(sourceFile)), format)) {
            //the PersonId column index varies by file type
            int filterColumn;
            if (name.contains("ENCOUNTER") || name.contains("PATIENT")) {
                filterColumn = 1;
            } else if (name.contains("DIAGNOSIS")) {
                filterColumn = 13;
            } else if (name.contains("ALLERGY")) {
                filterColumn = 2;
            } else if (name.contains("PROBLEM")) {
                filterColumn = 4;
            } else {
                //if no patient column, just copy the file (i.e. PROCEDURE)
                LOG.info("Copying file without PatientId " + sourceFile);
                copyFile(sourceFile, destFile);
                continue;
            }
            //invert the header map into an ordered array of column names
            Map<String, Integer> headerMap = parser.getHeaderMap();
            String[] columnHeaders = new String[headerMap.size()];
            for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                columnHeaders[entry.getValue()] = entry.getKey();
            }
            try (CSVPrinter printer = new CSVPrinter(new BufferedWriter(new PrintWriter(destFile)),
                    format.withHeader(columnHeaders))) {
                for (CSVRecord csvRecord : parser) {
                    String patientId = csvRecord.get(filterColumn);
                    if (personIds.contains(patientId)) {
                        printer.printRecord(csvRecord);
                        printer.flush();
                    }
                }
            }
        }
    }
}
private static void createAdastraSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) {
LOG.info("Creating Adastra Subset");
try {
Set<String> caseIds = new HashSet<>();
List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath());
for (String line : lines) {
line = line.trim();
//ignore comments
if (line.startsWith("
continue;
}
//adastra extract files are all keyed on caseId
caseIds.add(line);
}
File sourceDir = new File(sourceDirPath);
File destDir = new File(destDirPath);
if (!destDir.exists()) {
destDir.mkdirs();
}
createAdastraSubsetForFile(sourceDir, destDir, caseIds);
LOG.info("Finished Creating Adastra Subset");
} catch (Throwable t) {
LOG.error("", t);
}
}
/**
 * Recursively copies the Adastra extract in sourceDir to destDir, keeping only
 * pipe-delimited CSV rows whose CaseRef is in caseIds. The CaseRef column index
 * varies per file type; files without a known CaseRef column are copied verbatim.
 *
 * @param sourceDir directory of the full extract (recursed into)
 * @param destDir   mirror directory for the subset
 * @param caseIds   case refs to retain
 */
private static void createAdastraSubsetForFile(File sourceDir, File destDir, Set<String> caseIds) throws Exception {
    File[] files = sourceDir.listFiles();
    if (files == null) {
        //listFiles() returns null if sourceDir isn't a readable directory
        return;
    }
    LOG.info("Found " + files.length + " files in " + sourceDir);
    for (File sourceFile : files) {
        String name = sourceFile.getName();
        File destFile = new File(destDir, name);
        if (sourceFile.isDirectory()) {
            if (!destFile.exists()) {
                destFile.mkdirs();
            }
            createAdastraSubsetForFile(sourceFile, destFile, caseIds);
            continue;
        }
        if (destFile.exists()) {
            destFile.delete();
        }
        LOG.info("Checking file " + sourceFile);
        //skip any non-CSV file
        String ext = FilenameUtils.getExtension(name);
        if (!ext.equalsIgnoreCase("csv")) {
            LOG.info("Skipping as not a CSV file");
            continue;
        }
        //adastra files are pipe-delimited
        CSVFormat format = CSVFormat.DEFAULT.withDelimiter('|');
        //FIX: try-with-resources so the parser and printer are always closed on error
        try (CSVParser parser = new CSVParser(new BufferedReader(new FileReader(sourceFile)), format)) {
            //the CaseRef column index varies by file type; CASEQUESTIONS etc. must be
            //matched before the more general "CASE" check
            int filterColumn;
            if (name.contains("NOTES") || name.contains("CASEQUESTIONS")
                    || name.contains("OUTCOMES") || name.contains("CONSULTATION")
                    || name.contains("CLINICALCODES") || name.contains("PRESCRIPTIONS")
                    || name.contains("PATIENT")) {
                filterColumn = 0;
            } else if (name.contains("CASE")) {
                filterColumn = 2;
            } else if (name.contains("PROVIDER")) {
                filterColumn = 7;
            } else {
                //if no patient column, just copy the file
                LOG.info("Copying non-patient file " + sourceFile);
                copyFile(sourceFile, destFile);
                continue;
            }
            try (CSVPrinter printer = new CSVPrinter(new BufferedWriter(new PrintWriter(destFile)), format)) {
                for (CSVRecord csvRecord : parser) {
                    String caseId = csvRecord.get(filterColumn);
                    if (caseIds.contains(caseId)) {
                        printer.printRecord(csvRecord);
                        printer.flush();
                    }
                }
            }
        }
    }
}
/**
 * Dumps every row of resource_current for a service into per-resource-type TSV files
 * (resource_id, resource_json) under destinationPath.
 *
 * @param serviceId       service whose EHR database to read
 * @param destinationPath directory to write the .tsv files into (created if missing)
 */
private static void exportFhirToCsv(UUID serviceId, String destinationPath) {
    try {
        File dir = new File(destinationPath);
        //BUG FIX: the condition was inverted — mkdirs() only ran when the directory
        //already existed, so a missing destination directory made the export fail
        if (!dir.exists()) {
            dir.mkdirs();
        }
        //one open printer per resource type, keyed on the type name
        Map<String, CSVPrinter> hmPrinters = new HashMap<>();
        EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId);
        SessionImpl session = (SessionImpl) entityManager.getDelegate();
        Connection connection = session.connection();
        PreparedStatement ps = connection.prepareStatement("SELECT resource_id, resource_type, resource_data FROM resource_current");
        LOG.debug("Running query");
        ResultSet rs = ps.executeQuery();
        LOG.debug("Got result set");
        while (rs.next()) {
            String id = rs.getString(1);
            String type = rs.getString(2);
            String json = rs.getString(3);
            CSVPrinter printer = hmPrinters.get(type);
            if (printer == null) {
                //first row of this resource type — open its TSV file
                String path = FilenameUtils.concat(dir.getAbsolutePath(), type + ".tsv");
                FileWriter fileWriter = new FileWriter(new File(path));
                BufferedWriter bufferedWriter = new BufferedWriter(fileWriter);
                //tab-separated, no quoting or escaping, since the JSON column contains quotes
                CSVFormat format = CSVFormat.DEFAULT
                        .withHeader("resource_id", "resource_json")
                        .withDelimiter('\t')
                        .withEscape((Character) null)
                        .withQuote((Character) null)
                        .withQuoteMode(QuoteMode.MINIMAL);
                printer = new CSVPrinter(bufferedWriter, format);
                hmPrinters.put(type, printer);
            }
            printer.printRecord(id, json);
        }
        //flush and close every per-type printer
        for (CSVPrinter printer : hmPrinters.values()) {
            printer.flush();
            printer.close();
        }
        //FIX: close the result set as well as the statement
        rs.close();
        ps.close();
        entityManager.close();
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
/**
 * Recursively fixes TPP CSV files whose IDOrganisationVisibleTo column is empty,
 * filling it with the supplied ODS code. Each file is rewritten to a temporary
 * ".FIXED" sibling which then replaces the original. Throws if a file contains a
 * populated org code that differs from the parameter.
 *
 * @param sourceDir directory to process (recursed into)
 * @param orgODS    the ODS code to substitute into empty org cells
 */
private static void fixTPPNullOrgs(String sourceDir, String orgODS) throws Exception {
    final String COLUMN_ORG = "IDOrganisationVisibleTo";
    File[] files = new File(sourceDir).listFiles();
    if (files == null)
        return;
    LOG.info("Found " + files.length + " files in " + sourceDir);
    for (File sourceFile : files) {
        String sourceFileName = sourceFile.getName();
        if (sourceFile.isDirectory()) {
            //BUG FIX: recurse with the directory's full path; previously only the bare
            //directory NAME was passed, so nested directories resolved against the
            //working directory and were never actually processed
            fixTPPNullOrgs(sourceFile.getPath(), orgODS);
            continue;
        }
        LOG.info("Checking file " + sourceFile);
        //skip any non-CSV file
        String ext = FilenameUtils.getExtension(sourceFileName);
        if (!ext.equalsIgnoreCase("csv")) {
            LOG.info("Skipping as not a CSV file");
            continue;
        }
        //TPP extracts are Windows-1252 encoded
        Charset encoding = Charset.forName("CP1252");
        //BUG FIX: create the .FIXED file next to the source file; previously the bare
        //file NAME was used, so output (and the later rename) went to the working directory
        File destFile = new File(sourceFile.getParentFile(), sourceFileName + ".FIXED");
        CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader();
        //try-with-resources guarantees the parser/printer are closed, including on the
        //mismatched-ODS exception path below
        try (CSVParser parser = new CSVParser(
                new InputStreamReader(new BufferedInputStream(new FileInputStream(sourceFile)), encoding), format)) {
            Map<String, Integer> headerMap = parser.getHeaderMap();
            if (!headerMap.containsKey(COLUMN_ORG)) {
                //if no COLUMN_ORG column, ignore
                LOG.info("Ignoring file with no " + COLUMN_ORG + " column: " + sourceFile);
                continue;
            }
            //invert the header map into an ordered array of column names
            String[] columnHeaders = new String[headerMap.size()];
            for (Map.Entry<String, Integer> entry : headerMap.entrySet()) {
                columnHeaders[entry.getValue()] = entry.getKey();
            }
            try (CSVPrinter printer = new CSVPrinter(
                    new BufferedWriter(new OutputStreamWriter(new FileOutputStream(destFile), encoding)),
                    format.withHeader(columnHeaders))) {
                //iterate down the file and look at the org column
                for (CSVRecord csvRecord : parser) {
                    String fileOrgODS = csvRecord.get(COLUMN_ORG);
                    if (Strings.isNullOrEmpty(fileOrgODS)) {
                        //set the empty value to the orgODS value passed in
                        Map<String, String> recordMap = csvRecord.toMap();
                        recordMap.put(COLUMN_ORG, orgODS);
                        List<String> alteredCsvRecord = new ArrayList<String>();
                        for (String key : columnHeaders) {
                            alteredCsvRecord.add(recordMap.get(key));
                        }
                        printer.printRecord(alteredCsvRecord);
                        printer.flush();
                    } else if (!fileOrgODS.equalsIgnoreCase(orgODS)) {
                        //a populated org differing from the parameter means the file is inconsistent
                        throw new Exception("File contains different ODS codes to parameter value - aborting");
                    } else {
                        //write the record back unchanged
                        printer.printRecord(csvRecord);
                        printer.flush();
                    }
                }
            }
        }
        //finally, replace the source file with the fixed version
        sourceFile.delete();
        destFile.renameTo(sourceFile);
    }
}
}
/*class ResourceFiler extends FhirResourceFiler {
public ResourceFiler(UUID exchangeId, UUID serviceId, UUID systemId, TransformError transformError,
List<UUID> batchIdsCreated, int maxFilingThreads) {
super(exchangeId, serviceId, systemId, transformError, batchIdsCreated, maxFilingThreads);
}
private List<Resource> newResources = new ArrayList<>();
public List<Resource> getNewResources() {
return newResources;
}
@Override
public void saveAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling saveAdminResource");
}
@Override
public void deleteAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling deleteAdminResource");
}
@Override
public void savePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception {
for (Resource resource: resources) {
if (mapIds) {
IdHelper.mapIds(getServiceId(), getSystemId(), resource);
}
newResources.add(resource);
}
}
@Override
public void deletePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception {
throw new Exception("shouldn't be calling deletePatientResource");
}
}*/
/*
class MoveToS3Runnable implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(MoveToS3Runnable.class);
private List<FileInfo> files = null;
private AtomicInteger done = null;
public MoveToS3Runnable(List<FileInfo> files, AtomicInteger done) {
this.files = files;
this.done = done;
}
@Override
public void run() {
try {
doWork();
} catch (Exception ex) {
LOG.error("", ex);
}
}
private void doWork() throws Exception {
SourceFileMappingDalI db = DalProvider.factorySourceFileMappingDal();
//write to database
//Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>();
for (FileInfo info: files) {
String path = info.getFilePath();
InputStream inputStream = FileHelper.readFileFromSharedStorage(path);
ZipInputStream zis = new ZipInputStream(inputStream);
ZipEntry entry = zis.getNextEntry();
if (entry == null) {
throw new Exception("No entry in zip file " + path);
}
byte[] entryBytes = IOUtils.toByteArray(zis);
String json = new String(entryBytes);
inputStream.close();
ResourceFieldMappingAudit audit = ResourceFieldMappingAudit.readFromJson(json);
ResourceWrapper wrapper = new ResourceWrapper();
String versionStr = FilenameUtils.getBaseName(path);
wrapper.setVersion(UUID.fromString(versionStr));
Date d = info.getLastModified();
wrapper.setCreatedAt(d);
File f = new File(path);
f = f.getParentFile();
String resourceIdStr = f.getName();
wrapper.setResourceId(UUID.fromString(resourceIdStr));
f = f.getParentFile();
String resourceTypeStr = f.getName();
wrapper.setResourceType(resourceTypeStr);
f = f.getParentFile();
String serviceIdStr = f.getName();
wrapper.setServiceId(UUID.fromString(serviceIdStr));
Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>();
batch.put(wrapper, audit);
try {
db.saveResourceMappings(batch);
} catch (Exception ex) {
String msg = ex.getMessage();
if (msg.indexOf("Duplicate entry") == -1) {
throw ex;
}
}
*/
}*//*
/*if (batch.size() > 5) {
db.saveResourceMappings(batch);
batch.clear();
int nowDone = done.incrementAndGet();
if (nowDone % 1000 == 0) {
LOG.debug("Done " + nowDone + " / " + files.size());
}
}
*/
}*//*
/*if (!batch.isEmpty()) {
db.saveResourceMappings(batch);
batch.clear();
}
}*/
/**
 * One-shot task that back-fills the DataDate header of a single exchange by
 * recalculating it from the exchange body, then marks the exchange as done in
 * the drewtest.exchange_ids tracking table. Errors are logged, never thrown
 * out of {@link #call()}.
 */
class PopulateDataDateCallable implements Callable<Object> { //FIX: parameterise the raw Callable type
    private static final Logger LOG = LoggerFactory.getLogger(PopulateDataDateCallable.class);
    private static ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
    //the exchange to back-fill
    private UUID exchangeId = null;
    //shared counter of exchanges actually fixed (not just skipped)
    private AtomicInteger fixed = null;

    public PopulateDataDateCallable(UUID exchangeId, AtomicInteger fixed) {
        this.exchangeId = exchangeId;
        this.fixed = fixed;
    }

    private void doWork() throws Exception {
        Exchange exchange = exchangeDal.getExchange(exchangeId);
        //check if already done
        String existingVal = exchange.getHeader(HeaderKeys.DataDate);
        String software = exchange.getHeader(HeaderKeys.SourceSystem);
        String version = exchange.getHeader(HeaderKeys.SystemVersion);
        if (!Strings.isNullOrEmpty(existingVal)) {
            LOG.info("Already done exchange " + exchange.getId() + " software " + software + " version " + version);
            markAsDone();
            return;
        }
        String body = exchange.getBody();
        if (body.equals("[]")) {
            LOG.error("Empty body found in exchange " + exchange.getId() + " software " + software + " version " + version);
            markAsDone();
            return;
        }
        Date lastDataDate = OpenEnvelope.calculateLastDataDate(software, version, body);
        if (lastDataDate == null) {
            LOG.error("Failed to calculate data for exchange " + exchange.getId() + " software " + software + " version " + version);
            markAsDone();
            return;
        }
        SimpleDateFormat simpleDateFormat = new SimpleDateFormat(OpenEnvelope.DATA_DATE_FORMAT);
        exchange.setHeader(HeaderKeys.DataDate, simpleDateFormat.format(lastDataDate));
        exchangeDal.save(exchange);
        //mark as done, and only bump the counter when we actually changed something
        markAsDone();
        fixed.incrementAndGet();
    }

    /** Flags this exchange as processed in the tracking table. */
    private void markAsDone() throws Exception {
        EntityManager auditEntityManager = ConnectionManager.getAuditEntityManager();
        auditEntityManager.getTransaction().begin();
        SessionImpl auditSession = (SessionImpl) auditEntityManager.getDelegate();
        Connection auditConnection = auditSession.connection();
        String sql = "UPDATE drewtest.exchange_ids SET done = 1 WHERE id = ?";
        PreparedStatement ps = auditConnection.prepareStatement(sql);
        ps.setString(1, exchangeId.toString());
        ps.executeUpdate();
        auditEntityManager.getTransaction().commit();
        ps.close();
        auditEntityManager.close();
    }

    @Override
    public Object call() throws Exception {
        try {
            doWork();
        } catch (Throwable ex) {
            //never propagate — just log, so one bad exchange doesn't kill the executor
            LOG.error("Error with " + exchangeId, ex);
        }
        return null;
    }
}
/**
 * Trivial RabbitMQ consumer used for testing: logs each delivery, sleeps for a
 * second to simulate work, then acks the message.
 */
class TestRabbitConsumer extends DefaultConsumer {
    private static final Logger LOG = LoggerFactory.getLogger(TestRabbitConsumer.class);

    public TestRabbitConsumer(Channel channel) {
        super(channel);
    }

    @Override
    public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] bytes) throws IOException {
        long deliveryTag = envelope.getDeliveryTag();
        String bodyStr = new String(bytes, "UTF-8");
        LOG.info("Received exchange body: " + bodyStr);
        try {
            //simulate a second of processing
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            //FIX: restore the interrupt flag instead of swallowing it, so the
            //surrounding consumer thread can observe the interruption
            Thread.currentThread().interrupt();
        }
        this.getChannel().basicAck(deliveryTag, false);
    }
}
|
package monoxide.forgebackup;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import net.minecraft.command.ICommandSender;
import net.minecraft.server.MinecraftServer;
import net.minecraft.util.IProgressUpdate;
import net.minecraft.world.MinecraftException;
import net.minecraft.world.WorldServer;
import com.google.common.collect.Lists;
/**
 * The /backup command: disables world saving, forces a full save, zips the world
 * save directory plus the config directory into backups/&lt;timestamp&gt;.zip, then
 * re-enables saving.
 */
public class CommandBackup extends CommandBackupBase {
    public CommandBackup(MinecraftServer server) {
        super(server);
    }

    @Override
    public String getCommandName() {
        return "backup";
    }

    @Override
    public int getRequiredPermissionLevel() {
        //op-level permission required
        return 4;
    }

    @Override
    public void processCommand(ICommandSender sender, String[] args) {
        notifyBackupAdmins(sender, "ForgeBackup.backup.start");
        notifyBackupAdmins(sender, "ForgeBackup.save.disabled");
        toggleSavability(false);
        try {
            notifyBackupAdmins(sender, "ForgeBackup.save.force");
            forceSaveAllWorlds();
            notifyBackupAdmins(sender, "ForgeBackup.backup.progress");
            doBackup(sender);
        } catch (MinecraftException e) {
            notifyBackupAdmins(sender, Level.SEVERE, "ForgeBackup.backup.aborted");
            BackupLog.log(Level.SEVERE, e, e.getMessage());
            return;
        } catch (IOException e) {
            notifyBackupAdmins(sender, Level.SEVERE, "ForgeBackup.backup.aborted");
            BackupLog.log(Level.SEVERE, e, e.getMessage());
            return;
        } finally {
            //always re-enable saving, even if the backup failed
            notifyBackupAdmins(sender, "ForgeBackup.save.enabled");
            toggleSavability(true);
        }
        notifyBackupAdmins(sender, "ForgeBackup.backup.complete");
    }

    /** Enables or disables chunk saving on every loaded world. */
    private void toggleSavability(boolean canSave) {
        for (int i = 0; i < server.worldServers.length; ++i) {
            if (server.worldServers[i] != null) {
                WorldServer worldServer = server.worldServers[i];
                worldServer.canNotSave = !canSave;
            }
        }
    }

    /** Saves all player data and all chunks, temporarily overriding the no-save flag. */
    private void forceSaveAllWorlds() throws MinecraftException {
        if (server.getConfigurationManager() != null) {
            server.getConfigurationManager().saveAllPlayerData();
        }
        for (int i = 0; i < server.worldServers.length; ++i) {
            if (server.worldServers[i] != null) {
                WorldServer world = server.worldServers[i];
                boolean couldNotSave = world.canNotSave;
                world.canNotSave = false;
                world.saveAllChunks(true, (IProgressUpdate) null);
                world.canNotSave = couldNotSave;
            }
        }
    }

    /**
     * Zips the world save and config directories into a timestamped file under backups/.
     * Individual unreadable files are logged and skipped rather than aborting the backup.
     */
    private void doBackup(ICommandSender sender) throws IOException {
        File backupsFolder = server.getFile("backups");
        if (backupsFolder.exists() && !backupsFolder.isDirectory()) {
            notifyBackupAdmins(sender, Level.WARNING, "ForgeBackup.backup.folderExists");
            return;
        } else if (!backupsFolder.exists()) {
            backupsFolder.mkdir();
        }
        File backupFile = new File(backupsFolder, getBackupFileName());
        ZipOutputStream backup = new ZipOutputStream(new FileOutputStream(backupFile));
        //FIX: close the zip stream in a finally block so it isn't leaked (and the
        //archive isn't left unterminated) when an entry write throws
        try {
            List<File> saveDirectories = Lists.newArrayList(server.getFile(server.worldServers[0].getSaveHandler().getSaveDirectoryName()), server.getFile("config"));
            byte[] buffer = new byte[4096];
            int readBytes;
            while (!saveDirectories.isEmpty()) {
                File current = saveDirectories.remove(0);
                File[] children = current.listFiles();
                if (children == null) {
                    //FIX: listFiles() returns null for unreadable directories — skip instead of NPE
                    continue;
                }
                for (File child : children) {
                    if (child.isDirectory()) {
                        saveDirectories.add(child);
                    } else {
                        //strip the leading "./" from the path for the zip entry name
                        backup.putNextEntry(new ZipEntry(child.getPath().substring(2)));
                        InputStream currentStream = null;
                        try {
                            currentStream = new FileInputStream(child);
                            while ((readBytes = currentStream.read(buffer)) >= 0) {
                                backup.write(buffer, 0, readBytes);
                            }
                        } catch (IOException e) {
                            //skip unreadable files but keep going with the rest of the backup
                            BackupLog.warning("Couldn't backup file: %s", child.getPath());
                        } finally {
                            //FIX: close the per-file stream even when reading it failed
                            if (currentStream != null) {
                                currentStream.close();
                            }
                        }
                        backup.closeEntry();
                    }
                }
            }
        } finally {
            backup.close();
        }
    }

    /** @return a file name like 20240131-235959.zip built from the current time */
    private String getBackupFileName() {
        Date now = new Date();
        //use an explicit argument index instead of passing the same Date six times
        return String.format("%1$TY%1$Tm%1$Td-%1$TH%1$TM%1$TS.zip", now);
    }
}
|
package ninja.egg82.plugin.utils;
import java.util.ArrayList;
import java.util.List;
import org.bukkit.GameMode;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Banner;
import org.bukkit.block.Beacon;
import org.bukkit.block.Block;
import org.bukkit.block.BlockState;
import org.bukkit.block.BrewingStand;
import org.bukkit.block.Chest;
import org.bukkit.block.CommandBlock;
import org.bukkit.block.CreatureSpawner;
import org.bukkit.block.Dispenser;
import org.bukkit.block.Dropper;
import org.bukkit.block.EndGateway;
import org.bukkit.block.FlowerPot;
import org.bukkit.block.Furnace;
import org.bukkit.block.Hopper;
import org.bukkit.block.Jukebox;
import org.bukkit.block.NoteBlock;
import org.bukkit.block.Sign;
import org.bukkit.block.Skull;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;
import org.bukkit.material.MaterialData;
import ninja.egg82.plugin.core.BlockData;
public final class BlockUtil {
//vars
//constructor
//NOTE(review): this class only exposes static methods, so a private constructor would
//be more conventional; the public no-arg constructor is kept to avoid breaking callers.
public BlockUtil() {
}
//public
/**
 * Walks upwards from the given location until it finds at least two vertically
 * adjacent AIR blocks, then walks back down so the returned location is the lowest
 * AIR block resting directly on a non-air block in that column.
 * <p>
 * NOTE(review): Location.add()/subtract() mutate the location in place, so several
 * of the loop conditions below advance {@code l} as a side effect — the exact
 * statement order is load-bearing; confirm carefully before restructuring.
 *
 * @param l the starting location; it is cloned, so the caller's instance is untouched
 * @return a standable air location above the obstructions at this column
 * @throws IllegalArgumentException if l is null
 */
public static Location getTopAirBlock(Location l) {
    if (l == null) {
        throw new IllegalArgumentException("l cannot be null.");
    }
    //work on a copy so the caller's location isn't mutated
    l = l.clone();
    do {
        //climb until the current block is air
        while (l.getBlock().getType() != Material.AIR) {
            l = l.add(0.0d, 1.0d, 0.0d);
        }
        //keep climbing until the block ABOVE is also air (add() mutates l in the condition)
        while (l.add(0.0d, 1.0d, 0.0d).getBlock().getType() != Material.AIR) {
        }
        l.subtract(0.0d, 1.0d, 0.0d);
        //repeat until we have a two-block air gap (the condition's add() mutates l again)
    } while (l.getBlock().getType() != Material.AIR || l.add(0.0d, 1.0d, 0.0d).getBlock().getType() != Material.AIR);
    l.subtract(0.0d, 1.0d, 0.0d);
    //descend back down through the air gap to just above the first solid block
    while (l.subtract(0.0d, 1.0d, 0.0d).getBlock().getType() == Material.AIR) {
    }
    l.add(0.0d, 1.0d, 0.0d);
    return l;
}
/**
 * Captures the block at the given location as a BlockData snapshot.
 *
 * @param location the location whose block to capture; must not be null
 * @return the captured block data
 * @throws IllegalArgumentException if location is null
 */
public static BlockData getBlock(Location location) {
    if (location == null) {
        throw new IllegalArgumentException("location cannot be null.");
    }
    Block target = location.getBlock();
    return getBlock(target);
}
/**
 * Captures the given block as a BlockData snapshot.
 *
 * @param block the block to capture; must not be null
 * @return the captured block data
 * @throws IllegalArgumentException if block is null
 */
public static BlockData getBlock(Block block) {
    if (block == null) {
        throw new IllegalArgumentException("block cannot be null.");
    }
    BlockState state = block.getState();
    return getBlock(state);
}
/**
 * Captures a block state as a BlockData snapshot, including any "contents" the block
 * carries: inventory for containers, the potted item for flower pots, and the playing
 * disc for jukeboxes. Blocks with no contents get a null item array.
 *
 * @param blockState the state to capture; must not be null
 * @return the captured block data
 * @throws IllegalArgumentException if blockState is null
 */
public static BlockData getBlock(BlockState blockState) {
    if (blockState == null) {
        throw new IllegalArgumentException("blockState cannot be null.");
    }
    Material type = blockState.getType();
    if (blockState instanceof InventoryHolder) {
        //container blocks — snapshot the whole inventory
        ItemStack[] contents = ((InventoryHolder) blockState).getInventory().getContents();
        return new BlockData(contents, blockState, type);
    }
    if (type == Material.FLOWER_POT) {
        //flower pots hold at most one item
        MaterialData potted = ((FlowerPot) blockState).getContents();
        ItemStack[] contents = (potted != null) ? new ItemStack[] {potted.toItemStack()} : null;
        return new BlockData(contents, blockState, type);
    }
    if (type == Material.JUKEBOX) {
        //jukeboxes hold at most one disc
        Material disc = ((Jukebox) blockState).getPlaying();
        ItemStack[] contents = (disc != null && disc != Material.AIR) ? new ItemStack[] {new ItemStack(disc)} : null;
        return new BlockData(contents, blockState, type);
    }
    return new BlockData(null, blockState, type);
}
/**
 * Applies a BlockData snapshot to the given block.
 *
 * @param block the block to overwrite; must not be null
 * @param data  the snapshot to apply; must not be null
 * @throws IllegalArgumentException if block is null
 */
public static void setBlock(Block block, BlockData data) {
    if (block == null) {
        throw new IllegalArgumentException("block cannot be null.");
    }
    Location where = block.getLocation();
    setBlock(where, data);
}
/**
 * Applies a BlockData snapshot to the block at the given location: clears any existing
 * contents, sets the material and saved state, restores the snapshot's contents, and
 * pushes the update to the world.
 * NOTE(review): the order below (clear -> setType -> state data -> contents -> update)
 * appears deliberate so items aren't duplicated or dropped; preserve it.
 *
 * @param location the block position to overwrite; must not be null
 * @param data     the snapshot to apply; must not be null
 * @throws IllegalArgumentException if location or data is null
 */
public static void setBlock(Location location, BlockData data) {
    if (location == null) {
        throw new IllegalArgumentException("location cannot be null.");
    }
    if (data == null) {
        throw new IllegalArgumentException("data cannot be null.");
    }
    BlockState blockState = location.getBlock().getState();
    //remember the CURRENT type — the contents-restoring branches below key off what
    //the block was before the material change
    Material blockType = blockState.getType();
    //empty any existing contents first so the type change can't spill/dupe items
    clearInventory(blockState);
    blockState.setType(data.getMaterial());
    if (data.getState() != null) {
        setBlockData(blockState, data.getState());
    }
    //restore the snapshot's contents, matching how getBlock() captured them
    if (data.getInventory() != null) {
        if (blockState instanceof InventoryHolder) {
            ((InventoryHolder) blockState).getInventory().setContents(data.getInventory());
        } else if (blockType == Material.FLOWER_POT) {
            ((FlowerPot) blockState).setContents(data.getInventory()[0].getData());
        } else if (blockType == Material.JUKEBOX) {
            ((Jukebox) blockState).setPlaying(data.getInventory()[0].getType());
        }
    }
    //force + applyPhysics so the world reflects the change immediately
    blockState.update(true, true);
}
/**
 * Captures every block in the cuboid centred on the given location, iterating in
 * x -> z -> y order (the same order setBlocks() expects).
 *
 * @param center  centre of the cuboid; must not be null
 * @param xRadius blocks either side of the centre on the x axis
 * @param yRadius blocks either side of the centre on the y axis
 * @param zRadius blocks either side of the centre on the z axis
 * @return snapshots of all (2x+1)(2y+1)(2z+1) blocks
 * @throws IllegalArgumentException if center is null
 */
public static List<BlockData> getBlocks(Location center, int xRadius, int yRadius, int zRadius) {
    if (center == null) {
        throw new IllegalArgumentException("center cannot be null.");
    }
    int minX = center.getBlockX() - xRadius;
    int maxX = center.getBlockX() + xRadius;
    int minY = center.getBlockY() - yRadius;
    int maxY = center.getBlockY() + yRadius;
    int minZ = center.getBlockZ() - zRadius;
    int maxZ = center.getBlockZ() + zRadius;
    Location currentLocation = new Location(center.getWorld(), 0.0d, 0.0d, 0.0d);
    //IMPROVEMENT: pre-size the list — the cuboid's block count is known exactly,
    //avoiding repeated ArrayList growth for large radii
    int capacity = (xRadius * 2 + 1) * (yRadius * 2 + 1) * (zRadius * 2 + 1);
    ArrayList<BlockData> blocks = new ArrayList<BlockData>(capacity);
    for (int x = minX; x <= maxX; x++) {
        currentLocation.setX(x);
        for (int z = minZ; z <= maxZ; z++) {
            currentLocation.setZ(z);
            for (int y = minY; y <= maxY; y++) {
                currentLocation.setY(y);
                blocks.add(getBlock(currentLocation));
            }
        }
    }
    return blocks;
}
/**
 * Applies a list of BlockData snapshots (as produced by getBlocks()) to the cuboid
 * centred on the given location. The list is consumed in the same x -> z -> y order
 * that getBlocks() produces, so indices line up exactly.
 *
 * @param blocks  snapshots to apply; must be exactly (2x+1)(2y+1)(2z+1) long
 * @param center  centre of the cuboid; must not be null
 * @param xRadius blocks either side of the centre on the x axis
 * @param yRadius blocks either side of the centre on the y axis
 * @param zRadius blocks either side of the centre on the z axis
 * @throws IllegalArgumentException if blocks or center is null
 * @throws RuntimeException if the list length doesn't match the cuboid size
 */
public static void setBlocks(List<BlockData> blocks, Location center, int xRadius, int yRadius, int zRadius) {
    if (blocks == null) {
        throw new IllegalArgumentException("blocks cannot be null.");
    }
    int expectedCount = (xRadius * 2 + 1) * (yRadius * 2 + 1) * (zRadius * 2 + 1);
    if (blocks.size() != expectedCount) {
        throw new RuntimeException("blocks is not the correct length.");
    }
    if (center == null) {
        throw new IllegalArgumentException("center cannot be null.");
    }
    int startX = center.getBlockX() - xRadius;
    int endX = center.getBlockX() + xRadius;
    int startY = center.getBlockY() - yRadius;
    int endY = center.getBlockY() + yRadius;
    int startZ = center.getBlockZ() - zRadius;
    int endZ = center.getBlockZ() + zRadius;
    Location cursor = new Location(center.getWorld(), 0.0d, 0.0d, 0.0d);
    int index = 0;
    for (int x = startX; x <= endX; x++) {
        cursor.setX(x);
        for (int z = startZ; z <= endZ; z++) {
            cursor.setZ(z);
            for (int y = startY; y <= endY; y++) {
                cursor.setY(y);
                setBlock(cursor, blocks.get(index++));
            }
        }
    }
}
/**
 * Replaces every block in the cuboid centred on the given location with the supplied
 * material (AIR when null), emptying any container contents first so items aren't dropped.
 *
 * @param center        centre of the cuboid; must not be null
 * @param clearMaterial material to fill with; null means AIR
 * @param xRadius       blocks either side of the centre on the x axis
 * @param yRadius       blocks either side of the centre on the y axis
 * @param zRadius       blocks either side of the centre on the z axis
 * @throws IllegalArgumentException if center is null
 */
public static void clearBlocks(Location center, Material clearMaterial, int xRadius, int yRadius, int zRadius) {
    if (center == null) {
        throw new IllegalArgumentException("center cannot be null.");
    }
    if (clearMaterial == null) {
        clearMaterial = Material.AIR;
    }
    int startX = center.getBlockX() - xRadius;
    int endX = center.getBlockX() + xRadius;
    int startY = center.getBlockY() - yRadius;
    int endY = center.getBlockY() + yRadius;
    int startZ = center.getBlockZ() - zRadius;
    int endZ = center.getBlockZ() + zRadius;
    Location cursor = new Location(center.getWorld(), 0.0d, 0.0d, 0.0d);
    for (int x = startX; x <= endX; x++) {
        cursor.setX(x);
        for (int z = startZ; z <= endZ; z++) {
            cursor.setZ(z);
            for (int y = startY; y <= endY; y++) {
                cursor.setY(y);
                BlockState state = cursor.getBlock().getState();
                //empty containers before the type change so their contents vanish cleanly
                clearInventory(state);
                state.setType(clearMaterial);
                state.update(true, true);
            }
        }
    }
}
/**
 * Breaks the block at {@code location} as a player in the given game mode
 * would, releasing any items stored "inside" the block (container
 * inventories, a flower pot's plant, a jukebox's record).
 *
 * @param state    captured state of the block being broken
 * @param location where the block lives and where drops appear
 * @param gameMode game mode of the breaking player; CREATIVE drops only the
 *                 stored items, other modes also drop the block itself
 * @param tool     tool used for the break; passed to Bukkit to decide drops
 *                 (may be null per Bukkit's breakNaturally contract)
 * @throws IllegalArgumentException if state, location or gameMode is null
 */
public static void breakNaturally(BlockState state, Location location, GameMode gameMode, ItemStack tool) {
    if (state == null) {
        throw new IllegalArgumentException("state cannot be null.");
    }
    if (location == null) {
        throw new IllegalArgumentException("location cannot be null.");
    }
    if (gameMode == null) {
        throw new IllegalArgumentException("gameMode cannot be null.");
    }
    Material blockType = state.getType();
    // Gather items held by the block, if any; null means nothing stored.
    ItemStack[] items = null;
    if (state instanceof InventoryHolder) {
        items = ((InventoryHolder) state).getInventory().getContents();
    } else if (blockType == Material.FLOWER_POT) {
        MaterialData currentItem = ((FlowerPot) state).getContents();
        items = (currentItem != null) ? new ItemStack[] {currentItem.toItemStack()} : null;
    } else if (blockType == Material.JUKEBOX) {
        Material currentItem = ((Jukebox) state).getPlaying();
        items = (currentItem != Material.AIR && currentItem != null) ? new ItemStack[] {new ItemStack(currentItem)} : null;
    }
    if (gameMode == GameMode.CREATIVE) {
        // Creative mode: the block itself yields no drop, but its stored
        // items are released before the block is replaced with air.
        World blockWorld = location.getWorld();
        if (items != null) {
            for (int i = 0; i < items.length; i++) {
                blockWorld.dropItemNaturally(location, items[i]);
            }
        }
        setBlock(location, new BlockData(null, null, Material.AIR));
    } else {
        // Other modes: reinstate the full block data, then let Bukkit compute
        // natural drops (block + contents) for the given tool.
        setBlock(location, new BlockData(items, state, blockType));
        location.getBlock().breakNaturally(tool);
    }
}
//private
/**
 * Empties whatever the given block state stores: container inventories,
 * a flower pot's contents, or a jukebox's playing record.
 *
 * @param block state to empty (modified in place)
 * @throws IllegalArgumentException if {@code block} is null
 */
private static void clearInventory(BlockState block) {
    if (block == null) {
        throw new IllegalArgumentException("block cannot be null.");
    }
    if (block instanceof InventoryHolder) {
        ((InventoryHolder) block).getInventory().clear();
    }
    // Flower pots and jukeboxes hold a single item outside any inventory.
    Material type = block.getType();
    if (type == Material.FLOWER_POT) {
        ((FlowerPot) block).setContents(null);
    } else if (type == Material.JUKEBOX) {
        ((Jukebox) block).setPlaying(null);
    }
}
/**
 * Copies type-specific tile-entity data from {@code data} onto {@code block}:
 * raw block data plus per-material state (banner patterns, inventories,
 * spawner settings, sign text, skull properties, ...). Both states are
 * expected to describe the same material; the caller is responsible for
 * calling {@code update()} on {@code block} afterwards.
 *
 * @param block destination state, modified in place
 * @param data  source state to copy from
 * @throws IllegalArgumentException if either argument is null
 */
private static void setBlockData(BlockState block, BlockState data) {
    if (block == null) {
        throw new IllegalArgumentException("block cannot be null.");
    }
    if (data == null) {
        throw new IllegalArgumentException("data cannot be null.");
    }
    Material type = block.getType();
    block.setData(data.getData());
    if (type == Material.STANDING_BANNER || type == Material.WALL_BANNER) {
        Banner b1 = (Banner) block;
        Banner b2 = (Banner) data;
        b1.setBaseColor(b2.getBaseColor());
        b1.setPatterns(b2.getPatterns());
    } else if (type == Material.BEACON) {
        ((Beacon) block).getInventory().setContents(((Beacon) data).getInventory().getContents());
    } else if (type == Material.BREWING_STAND) {
        BrewingStand b1 = (BrewingStand) block;
        BrewingStand b2 = (BrewingStand) data;
        b1.getInventory().setContents(b2.getInventory().getContents());
        b1.setBrewingTime(b2.getBrewingTime());
        b1.setFuelLevel(b2.getFuelLevel());
    } else if (type == Material.CHEST) {
        ((Chest) block).getBlockInventory().setContents(((Chest) data).getBlockInventory().getContents());
    } else if (type == Material.COMMAND) {
        CommandBlock b1 = (CommandBlock) block;
        CommandBlock b2 = (CommandBlock) data;
        b1.setName(b2.getName());
        b1.setCommand(b2.getCommand());
    } else if (type == Material.FURNACE || type == Material.BURNING_FURNACE) {
        Furnace b1 = (Furnace) block;
        Furnace b2 = (Furnace) data;
        b1.setBurnTime(b2.getBurnTime());
        b1.setCookTime(b2.getCookTime());
        b1.getInventory().setContents(b2.getInventory().getContents());
    } else if (type == Material.MOB_SPAWNER) {
        CreatureSpawner b1 = (CreatureSpawner) block;
        CreatureSpawner b2 = (CreatureSpawner) data;
        // Fix: setSpawnedType was previously duplicated; copying it once
        // (together with the delay) is sufficient.
        b1.setSpawnedType(b2.getSpawnedType());
        b1.setDelay(b2.getDelay());
    } else if (type == Material.DISPENSER) {
        ((Dispenser) block).getInventory().setContents(((Dispenser) data).getInventory().getContents());
    } else if (type == Material.DROPPER) {
        ((Dropper) block).getInventory().setContents(((Dropper) data).getInventory().getContents());
    } else if (type.toString().equalsIgnoreCase("end_gateway")) {
        // String comparison keeps this branch compatible with API versions
        // whose Material enum does not declare END_GATEWAY.
        EndGateway b1 = (EndGateway) block;
        EndGateway b2 = (EndGateway) data;
        b1.setExactTeleport(b2.isExactTeleport());
        b1.setExitLocation(b2.getExitLocation());
    } else if (type == Material.FLOWER_POT) {
        ((FlowerPot) block).setContents(((FlowerPot) data).getContents());
    } else if (type == Material.HOPPER) {
        ((Hopper) block).getInventory().setContents(((Hopper) data).getInventory().getContents());
    } else if (type == Material.JUKEBOX) {
        ((Jukebox) block).setPlaying(((Jukebox) data).getPlaying());
    } else if (type == Material.NOTE_BLOCK) {
        ((NoteBlock) block).setNote(((NoteBlock) data).getNote());
    } else if (type == Material.SIGN_POST || type == Material.WALL_SIGN) {
        Sign b1 = (Sign) block;
        String[] lines = ((Sign) data).getLines();
        for (int j = 0; j < lines.length; j++) {
            b1.setLine(j, lines[j]);
        }
    } else if (type == Material.SKULL) {
        Skull b1 = (Skull) block;
        Skull b2 = (Skull) data;
        b1.setOwningPlayer(b2.getOwningPlayer());
        b1.setRotation(b2.getRotation());
        b1.setSkullType(b2.getSkullType());
    }
}
}
|
package org.autonomous4j.control;
import java.util.ArrayList;
import org.autonomous4j.interfaces.A4jBrain2D;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.autonomous4j.listeners.xy.A4jLandListener;
import org.autonomous4j.physical.A4jLandController;
import org.autonomous4j.tracking.A4jBlackBox;
import org.autonomous4j.tracking.A4jBlackBox.Movement;
/**
*
* @author Mark Heckler (mark.heckler@gmail.com, @mkheck)
*/
public class A4jBrainL implements A4jBrain2D {
    /** Eagerly-initialized singleton instance. */
    private static final A4jBrainL brain = new A4jBrainL();
    /** Controller that executes the physical movement commands. */
    private final A4jLandController controller = new A4jLandController();
    /** MQTT listeners registered as observers of the controller. */
    private final List<A4jLandListener> listeners = new ArrayList<>();
    //private NavData currentNav;
    //private final A4jBlackBox recorder;
    // When true, actions are (to be) captured by the black-box recorder;
    // the recorder calls themselves are currently stubbed out.
    private boolean isRecording;

    /** Relative headings used by the turn/patrol helpers. */
    public enum Direction {LEFT, RIGHT, FORWARD}

    private A4jBrainL() {
        //this.recorder = new A4jBlackBox();
        isRecording = true;
    }

    /** @return the shared A4jBrainL instance. */
    public static A4jBrainL getInstance() {
        return brain;
    }

    /**
     * Connects the land controller and attaches the MQTT listeners (local
     * broker plus the public Eclipse IoT broker) as controller observers.
     *
     * @return true if everything connected, false on any failure
     */
    @Override
    public boolean connect() {
        try {
            controller.connect();
            // Local MQTT server
            listeners.add(new A4jLandListener());
            // Remote MQTT cloud servers
            // listeners.add(new A4jLandListener("tcp://m11.cloudmqtt.com:14655")
            //        .setUserName("<userID>")
            //        .setPassword("<password>"));
            listeners.add(new A4jLandListener("tcp://iot.eclipse.org:1883"));
            listeners.stream().forEach((listener) -> controller.addObserver(listener.connect()));
        } catch (Exception ex) {
            System.err.println("Exception creating new drone connection: " + ex.getMessage());
            return false;
        }
        return true;
    }

    /**
     * Disconnects all listeners, detaches them from the controller, and then
     * shuts down the controller connection itself.
     */
    @Override
    public void disconnect() {
        if (controller != null) {
            if (!listeners.isEmpty()) {
                listeners.stream().forEach((listener) -> listener.disconnect());
                // After disconnecting the listeners from their respective MQTT
                // servers, we now delete the controller's references to them.
                controller.deleteObservers();
            }
            controller.disconnect();
        }
        //recorder.shutdown();
    }

    /**
     * Convenience (pass-through) method for more fluent API.
     * @param ms Long variable specifying a number of milliseconds.
     * @return A4jBrainL object (allows command chaining/fluency).
     * @see #hold(long)
     */
    @Override
    public A4jBrainL doFor(long ms) {
        return hold(ms);
    }

    /**
     * Holds the current position by sleeping the calling thread.
     *
     * @param ms milliseconds to hold
     * @return this brain, for chaining
     */
    @Override
    public A4jBrainL hold(long ms) {
        System.out.println("Brain.hold for " + ms + " milliseconds...");
        try {
            Thread.sleep(ms);
            if (isRecording) {
                //recorder.recordDuration(ms);
            }
        } catch (InterruptedException e) {
            // Fix: restore the interrupt status instead of swallowing it
            // (previously only printed), so callers can observe the interrupt.
            Thread.currentThread().interrupt();
            Logger.getLogger(A4jBrainL.class.getName()).log(Level.SEVERE, null, e);
        }
        return this;
    }

    /**
     * Stops all movement.
     *
     * @return this brain, for chaining
     */
    @Override
    public A4jBrainL stay() {
        System.out.println("Brain.stay");
        controller.stop();
        if (isRecording) {
            //recorder.recordAction(A4jBlackBox.Action.STAY);
        }
        return this;
    }

    /**
     * Moves forward the given distance.
     *
     * @param distance distance to travel (units defined by the controller)
     * @return this brain, for chaining
     */
    @Override
    public A4jBrainL forward(long distance) {
        System.out.println("Brain.forward");
        if (isRecording) {
            //recorder.recordAction(A4jBlackBox.Action.FORWARD, speed);
        }
        controller.forward(distance);
        return this;
    }

    /**
     * Moves backward the given distance.
     *
     * @param distance distance to travel (units defined by the controller)
     * @return this brain, for chaining
     */
    @Override
    public A4jBrainL backward(long distance) {
        System.out.println("Brain.backward");
        if (isRecording) {
            //recorder.recordAction(A4jBlackBox.Action.BACKWARD, speed);
        }
        controller.back(distance);
        return this;
    }

    /**
     * Simple four-leg patrol: advance 20 units, then make a 180-degree turn
     * toward the nearer side wall, four times over.
     *
     * @return this brain, for chaining
     */
    public A4jBrainL patrol() {
        // Initial sensor sweep; readings are refreshed inside the loop, so the
        // dead-store locals previously assigned here have been removed.
        controller.pingLeft();
        controller.pingRight();
        controller.pingForward();
        // The rest of the pattern is identical until the final positioning movement
        // We do capture the initial position of the drone from the wall, however
        for (int i = 0; i < 4; i++) {
            forward(20);
            long distL = controller.pingLeft();
            long distR = controller.pingRight();
            controller.pingForward();
            turn(distL < distR ? Direction.LEFT : Direction.RIGHT, 180L);
        }
        return this;
    }

    /**
     * Perimeter patrol; chooses the "best" starting direction based upon how
     * close each wall is, traces the room perimeter stopping STOP_DIST short
     * of each wall, and finally returns to (approximately) the starting
     * position and bearing.
     *
     * @return this brain, for chaining
     */
    public A4jBrainL patrolPerimeter() {
        long distF, distL, distR, distFromWall, distToCorner;
        Direction startDir, turnDir;
        final long STOP_DIST = 40;
        distL = controller.pingLeft();
        distR = controller.pingRight();
        distF = controller.pingForward();
        // ClosER wall wins the prize
        turnDir = distL < distR ? Direction.LEFT : Direction.RIGHT;
        // ClosEST wall wins the grand prize
        startDir = distF < Math.min(distL, distR) ? Direction.FORWARD : turnDir;
        if (startDir != Direction.FORWARD) {
            // Turn to face target (closest) wall
            turn(turnDir);
        }
        // The rest of the pattern is identical until the final positioning movement
        // We do capture the initial position of the drone from the wall, however
        distFromWall = pingMove(STOP_DIST);
        turn(turnDir);
        distToCorner = pingMove(STOP_DIST);
        turn(turnDir);
        pingMove(STOP_DIST);
        turn(turnDir);
        pingMove(STOP_DIST);
        turn(turnDir);
        pingMove(STOP_DIST);
        turn(turnDir);
        pingMove(distToCorner);
        turn(turnDir);
        pingMove(distFromWall);
        turn(turnDir); // Turn 180 degrees to regain initial bearing
        if (startDir == Direction.FORWARD) {
            turn(turnDir);
        }
        return this;
    }

    /**
     * Coverage ("blanket") patrol; chooses the "best" direction based upon how
     * far away each wall is. Five legs, each time heading toward the most open
     * direction; if boxed in (everything closer than STOP_DIST) does a 180 and
     * re-evaluates.
     *
     * @return this brain, for chaining
     */
    public A4jBrainL patrolBlanket() {
        long distF, distL, distR;
        Direction dir;
        final long STOP_DIST = 50;
        for (int i = 0; i < 5; i++) {
            distL = controller.pingLeft();
            distR = controller.pingRight();
            distF = controller.pingForward();
            // Furthest wall wins the prize & determines best direction for turn
            dir = distL > distR ? Direction.LEFT : Direction.RIGHT;
            // If ALVIN finds himself in a corner, he does a 180, then re-evaluates
            if (Math.max(distF, Math.max(distL, distR)) < STOP_DIST) {
                turn(dir, 180);
            } else {
                dir = distF > Math.max(distL, distR) ? Direction.FORWARD : dir;
                if (dir != Direction.FORWARD) {
                    turn(dir);
                }
                // Return value intentionally unused here (dead store removed).
                pingMove(STOP_DIST);
            }
        }
        return this;
    }

    /**
     * Pings forward and advances to within stopDistance of the obstacle ahead.
     *
     * @param stopDistance distance (in cm) to stop short of the obstacle
     * @return the measured forward distance before moving
     */
    private Long pingMove(Long stopDistance) {
        Long distance = controller.pingForward();
        forward((distance - stopDistance) > 0 ?
            distance - stopDistance :
            0); // Stop specified distance (in cm) from wall
        return distance;
    }

    /**
     * Drives a box pattern of the given size around the current position,
     * ending back at (approximately) the starting point and bearing.
     *
     * @param dir           direction to turn at each corner
     * @param cmMaxDistance side length of the box in cm
     * @return this brain, for chaining
     */
    public A4jBrainL doBox(A4jBrainL.Direction dir, long cmMaxDistance) {
        forward(cmMaxDistance / 2); // N center of box
        turn(dir);
        forward(cmMaxDistance / 2); // To NW corner
        turn(dir);
        forward(cmMaxDistance); // To SW corner
        turn(dir);
        forward(cmMaxDistance); // To SE corner
        turn(dir);
        forward(cmMaxDistance); // To NE corner
        turn(dir);
        forward(cmMaxDistance / 2); // Return to N center
        turn(dir);
        forward(cmMaxDistance / 2); // Return to box center (approximadamente)
        turn(dir); // Turn twice to return to original bearing (mas o menos)
        turn(dir);
        return this;
    }

    /** Makes a 90-degree turn in the given direction. */
    private A4jBrainL turn(A4jBrainL.Direction dir) {
        if (dir == Direction.LEFT) {
            left(90);
        } else { // Direction.RIGHT
            right(90);
        }
        return this;
    }

    /** Turns the given number of degrees in the given direction. */
    private A4jBrainL turn(A4jBrainL.Direction dir, long degrees) {
        if (dir == Direction.LEFT) {
            left(degrees);
        } else { // Direction.RIGHT
            right(degrees);
        }
        return this;
    }

    /** Retraces recorded movements back to the start (recorder is stubbed out). */
    @Override
    public A4jBrainL goHome() {
        //processRecordedMovements(recorder.home());
        return this;
    }

    /** Replays the recorded movement log (recorder is stubbed out). */
    @Override
    public A4jBrainL replay() {
        //processRecordedMovements(recorder.getRecording());
        return this;
    }

    /**
     * Rotates left by the given number of degrees.
     *
     * @param degrees degrees to rotate
     * @return this brain, for chaining
     */
    @Override
    public A4jBrainL left(long degrees) {
        System.out.println("Turn left " + degrees + " degrees.");
        if (isRecording) {
            //recorder.recordAction(A4jBlackBox.Action.LEFT, speed);
        }
        // MAH: Add in speed/duration/bearing.
        // MAH: Add in direction/distance? (enh)
        controller.left(degrees);
        return this;
    }

    /**
     * Rotates right by the given number of degrees.
     *
     * @param degrees degrees to rotate
     * @return this brain, for chaining
     */
    @Override
    public A4jBrainL right(long degrees) {
        System.out.println("Turn right " + degrees + " degrees.");
        if (isRecording) {
            //recorder.recordAction(A4jBlackBox.Action.RIGHT, speed);
        }
        controller.right(degrees);
        return this;
    }

    /**
     * Replays a list of recorded movements, suppressing recording while doing
     * so. Distances/degrees are not yet captured, so zero values are replayed.
     *
     * @param moves recorded movements to replay
     */
    @Override
    public void processRecordedMovements(List<Movement> moves) {
        // Disable recording for playback
        isRecording = false;
        // MAH: Fix this to replay (after recording) distances/degrees.
        for (Movement curMov : moves) {
            switch (curMov.getAction()) {
                case FORWARD:
                    forward(0);
                    break;
                case BACKWARD:
                    backward(0);
                    break;
                case RIGHT:
                    right(0);
                    break;
                case LEFT:
                    left(0);
                    break;
                case STAY:
                    stay();
                    break;
            }
            hold(curMov.getDuration());
            System.out.println(curMov);
        }
        // Re-enable recording
        isRecording = true;
    }
}
|
package org.ensembl.healthcheck;
import java.util.*;
import java.util.logging.*;
import java.sql.*;
import java.io.*;
import junit.framework.*;
import org.ensembl.healthcheck.util.*;
public class TestRunner {
    // CVS $Id$ keyword; expanded on checkout, may remain unexpanded in exports.
    private static String version = "$Id$";
    private ArrayList allTests; // will hold an instance of each test
    private ArrayList groupsToRun; // names of the test groups requested on the command line
    private Properties dbProps; // database connection settings
    private static Logger logger = Logger.getLogger("org.ensembl.healthcheck.TestRunner");

    /** Creates a new instance of TestRunner */
    public TestRunner() {
        groupsToRun = new ArrayList();
    } // TestRunner

    /**
     * Entry point: parse test-group names from the command line, set up
     * logging, read database.properties, discover all test cases, and run
     * those belonging to the requested groups.
     */
    public static void main(String[] args) {
        TestRunner tr = new TestRunner();
        System.out.println(tr.getVersion());
        tr.parseCommandLine(args);
        tr.setupLogging();
        tr.readPropertiesFile();
        //tr.showDatabaseList();
        tr.findAllTests();
        tr.runAllTests();
    } // main

    /** Reads database.properties from the working directory into dbProps. */
    private void readPropertiesFile() {
        String propsFile = System.getProperty("user.dir") + System.getProperty("file.separator") + "database.properties";
        dbProps = Utils.readPropertiesFile(propsFile);
        logger.fine("Read database properties from " + propsFile);
        Enumeration e = dbProps.propertyNames();
        String propName;
        while (e.hasMoreElements()) {
            propName = (String) e.nextElement();
            logger.finer("\t" + propName + " = " + dbProps.getProperty(propName));
        }
    } // readPropertiesFile

    /**
     * Interprets each command-line argument as a test-group name; exits with
     * a usage message when no arguments are given.
     */
    private void parseCommandLine(String[] args) {
        if (args.length == 0) {
            printUsage();
            System.exit(1);
        } else {
            for (int i = 0; i < args.length; i++) {
                groupsToRun.add(args[i]);
                System.out.println("Will run tests in group " + args[i]);
            }
        }
    } // parseCommandLine

    /** Routes this logger to stdout at FINEST level, bypassing parent handlers. */
    private void setupLogging() {
        logger.setUseParentHandlers(false); // stop parent logger getting the message
        Handler myHandler = new MyStreamHandler(System.out, new LogFormatter());
        myHandler.setLevel(Level.FINEST);
        logger.addHandler(myHandler);
        logger.setLevel(Level.FINEST);
        logger.info("Set logging level to " + logger.getLevel().getName());
    } // setupLogging

    /** Prints command-line usage. */
    private void printUsage() {
        System.out.println("\nUsage: TestRunner {group1} {group2} ...\n");
    } // printUsage

    /**
     * @return the CVS revision string with the keyword markers stripped, or
     *         the raw string if the keyword was never expanded
     */
    public String getVersion() {
        // strip off first and last few chars of version since these are only used by CVS
        // Fix: an unexpanded "$Id$" (e.g. from a CVS export) is too short to
        // trim and previously caused a StringIndexOutOfBoundsException here.
        if (version.length() < 7) {
            return version;
        }
        return version.substring(5, version.length() - 2);
    } // getVersion

    /**
     * Opens a connection using the configured driver/URL/credentials and
     * lists the database names matching the given regular expression.
     *
     * @param regexp pattern to match database names against
     * @return matching database names; empty if none matched
     */
    public String[] getListOfDatabaseNames(String regexp) {
        Connection conn;
        String[] databaseNames = null;
        // open connection
        try {
            conn = DBUtils.openConnection(dbProps.getProperty("driver", "org.gjt.mm.mysql.Driver"),
                dbProps.getProperty("databaseURL", "kaka.sanger.ac.uk"),
                dbProps.getProperty("user", "anonymous"),
                dbProps.getProperty("password", ""));
            logger.fine("Opened connection to " + dbProps.getProperty("databaseURL", "kaka.sanger.ac.uk") + " as " + dbProps.getProperty("user", "anonymous"));
            databaseNames = DBUtils.listDatabases(conn, regexp);
            if (databaseNames.length == 0) {
                logger.warning("No database names matched");
            }
            conn.close();
            logger.fine("Connection closed");
        } catch (Exception e) {
            // Connection problems are fatal for a test run.
            e.printStackTrace();
            System.exit(1);
        }
        return databaseNames;
    } // getDatabaseList

    /** Logs the list of databases whose names match the given regexp. */
    private void showDatabaseList(String regexp) {
        logger.fine("Listing databases matching " + regexp + " :\n");
        String[] databaseList = getListOfDatabaseNames(regexp);
        for (int i = 0; i < databaseList.length; i++) {
            logger.fine("\t" + databaseList[i]);
        }
    } // showDatabaseList

    /**
     * Discovers all test-case classes in this package (by scanning compiled
     * class files under src/) and stores one initialized instance of each
     * EnsTestCase subclass in allTests.
     */
    private void findAllTests() {
        allTests = new ArrayList();
        // find all classes that extend org.ensembl.healthcheck.EnsTestCase
        String thisClassName = this.getClass().getName();
        String packageName = thisClassName.substring(0, thisClassName.lastIndexOf("."));
        String directoryName = packageName.replace('.', File.separatorChar);
        logger.finest("Package name: " + packageName + " Directory name: " + directoryName);
        File f = new File(System.getProperty("user.dir") + File.separator + "src" + File.separator + directoryName);
        System.out.println("f.getName(): " + f.getPath());
        ClassFileFilenameFilter cnff = new ClassFileFilenameFilter();
        File[] classFiles = f.listFiles(cnff);
        // Fix: listFiles() returns null when the directory is missing or
        // unreadable; previously this caused a NullPointerException below.
        if (classFiles == null) {
            logger.warning("Cannot read class files from " + f.getPath());
            return;
        }
        logger.finer("Examining " + classFiles.length + " class files ...");
        // check if each class file extends EnsTestCase by checking its type
        // need to avoid trying to instantiate the abstract class EnsTestCase itself
        Class newClass;
        String baseClassName;
        for (int i = 0; i < classFiles.length; i++) {
            logger.finest(classFiles[i].getName());
            baseClassName = classFiles[i].getName().substring(0, classFiles[i].getName().lastIndexOf("."));
            // Fix: obj is reset each iteration; previously a stale instance
            // from an earlier iteration survived skipped or failed classes.
            Object obj = null;
            try {
                newClass = Class.forName(packageName + "." + baseClassName);
                String className = newClass.getName();
                if (!className.equals("org.ensembl.healthcheck.EnsTestCase") &&
                    !className.substring(className.length() - 4).equals("Test")) { // ignore JUnit tests
                    obj = newClass.newInstance();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            if (obj instanceof org.ensembl.healthcheck.EnsTestCase && !allTests.contains(obj)) {
                ((EnsTestCase) obj).init(this);
                allTests.add(obj); // note we store an INSTANCE of the test, not just its name
                logger.info("Added test case " + obj.getClass().getName());
            }
        } // for classFiles
        logger.finer("Found " + allTests.size() + " test case classes.");
    } // findAllTests

    /**
     * Runs every discovered test case that belongs to one of the requested
     * groups, printing each result. Discovers tests first if needed.
     */
    private void runAllTests() {
        // check if allTests() has been populated; if not, run findAllTests()
        if (allTests == null) {
            findAllTests();
        }
        if (allTests.size() == 0) {
            logger.warning("Warning: no tests found!");
            return;
        }
        Iterator it = allTests.iterator();
        while (it.hasNext()) {
            EnsTestCase testCase = (EnsTestCase) it.next();
            if (testCase.inGroups(groupsToRun)) {
                logger.info("\tRunning test of type " + testCase.getClass().getName());
                TestResult tr = testCase.run();
                System.out.println("\n" + tr.getName() + " " + tr.getResult() + " " + tr.getMessage() + "\n");
                // TBC
            }
        }
    } // runAllTests

    /**
     * @param databaseNames databases to iterate over
     * @return an iterator yielding a connection per named database, built
     *         from the configured driver/URL/credentials
     */
    public DatabaseConnectionIterator getDatabaseConnectionIterator(String[] databaseNames) {
        return new DatabaseConnectionIterator(dbProps.getProperty("driver"),
            dbProps.getProperty("databaseURL"),
            dbProps.getProperty("user"),
            dbProps.getProperty("password"),
            databaseNames);
    } // getDatabaseConnectionIterator
} // TestRunner
|
package org.erhsroboticsclub.frc2014;
import edu.wpi.first.wpilibj.*;
import org.erhsroboticsclub.frc2014.utilities.Messenger;
public class Robot extends SimpleRobot {
    // Motors and motor control
    Talon topLeftMotor, bottomLeftMotor, topRightMotor, bottomRightMotor;
    RobotDrive drive;
    Joystick stick;
    // Sensor inputs
    Gyro gyro;
    // Utility classes
    Messenger msg;

    /** Wires up the motors, drive train, joystick, gyro, and messenger, then
     *  disables the safety watchdog so the drive is never auto-stopped. */
    public void robotInit() {
        // Motors and motor control
        topLeftMotor = new Talon(RobotMap.TOP_LEFT_MOTOR);
        bottomLeftMotor = new Talon(RobotMap.BOTTOM_LEFT_MOTOR);
        topRightMotor = new Talon(RobotMap.TOP_RIGHT_MOTOR);
        bottomRightMotor = new Talon(RobotMap.BOTTOM_RIGHT_MOTOR);
        drive = new RobotDrive(topLeftMotor, bottomLeftMotor,
                topRightMotor, bottomRightMotor);
        stick = new Joystick(RobotMap.DRIVE_JOYSTICK);
        // Sensor inputs
        gyro = new Gyro(RobotMap.GYRO);
        // Utility classes
        msg = new Messenger();
        killSafety();
    }

    /** Autonomous period: intentionally empty. */
    public void autonomous() {
    }

    /**
     * Teleop loop: field-oriented mecanum drive from the joystick. Rotation
     * is locked to zero unless button 1 is held, in which case the joystick
     * twist (Z) axis supplies it.
     */
    public void operatorControl() {
        while (isEnabled() && isOperatorControl()) {
            double rotation = stick.getRawButton(1) ? stick.getZ() : 0;
            drive.mecanumDrive_Cartesian(stick.getX(), stick.getY(), rotation, gyro.getAngle());
        }
    }

    /** Test mode: idle loop while enabled. */
    public void test() {
        while (isEnabled() && isTest()) {
        }
    }

    /** Disables the watchdog and the drive's motor-safety checks. */
    private void killSafety() {
        getWatchdog().kill();
        drive.setSafetyEnabled(false);
    }
}
|
package org.exist.xupdate;
import java.io.IOException;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.TreeMap;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.apache.log4j.Logger;
import org.exist.dom.DocumentSet;
import org.exist.dom.NodeListImpl;
import org.exist.dom.XMLUtil;
import org.exist.storage.DBBroker;
import org.exist.util.FastStringBuffer;
import org.exist.xquery.PathExpr;
import org.exist.xquery.XPathException;
import org.exist.xquery.XQueryContext;
import org.exist.xquery.parser.XQueryLexer;
import org.exist.xquery.parser.XQueryParser;
import org.exist.xquery.parser.XQueryTreeParser;
import org.exist.xquery.value.Item;
import org.exist.xquery.value.NodeValue;
import org.exist.xquery.value.Sequence;
import org.exist.xquery.value.SequenceIterator;
import org.exist.xquery.value.Type;
import org.w3c.dom.Attr;
import org.w3c.dom.Comment;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.ProcessingInstruction;
import org.w3c.dom.Text;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.InputSource;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.ext.LexicalHandler;
import antlr.RecognitionException;
import antlr.TokenStreamException;
import antlr.collections.AST;
/**
* XUpdateProcessor.java
*
* @author Wolfgang Meier
*
*/
public class XUpdateProcessor implements ContentHandler, LexicalHandler {
public final static String XUPDATE_NS = "http:
private final static Logger LOG = Logger.getLogger(XUpdateProcessor.class);
private NodeListImpl contents = null;
private boolean inModification = false;
private boolean inAttribute = false;
private Modification modification = null;
private DocumentBuilder builder;
private Document doc;
private Stack stack = new Stack();
private Node currentNode = null;
private DBBroker broker;
private DocumentSet documentSet;
private List modifications = new ArrayList();
private Stack conditionals = new Stack();
private FastStringBuffer charBuf = new FastStringBuffer(6, 15, 5);
private Map variables = new TreeMap();
private Map namespaces = new HashMap(10);
/**
 * Creates an XUpdateProcessor working against the given broker and the set
 * of documents that select expressions will be evaluated over.
 *
 * @param broker broker used to evaluate select expressions (may be set later)
 * @param docs   documents the parsed modifications apply to
 * @throws ParserConfigurationException if a namespace-aware DOM builder
 *         cannot be created
 */
public XUpdateProcessor(DBBroker broker, DocumentSet docs)
    throws ParserConfigurationException {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    factory.setNamespaceAware(true);
    factory.setValidating(false);
    builder = factory.newDocumentBuilder();
    this.broker = broker;
    this.documentSet = docs;
    // Pre-bind the reserved "xml" prefix.
    // Fix: this literal was truncated in this copy of the file; restored to
    // the URI fixed for the "xml" prefix by the Namespaces in XML spec.
    namespaces.put("xml", "http://www.w3.org/XML/1998/namespace");
}
/**
 * Creates a processor with no broker or document set; callers must supply
 * them via {@link #setBroker(DBBroker)} and
 * {@link #setDocumentSet(DocumentSet)} before parsing.
 */
public XUpdateProcessor() throws ParserConfigurationException {
    this(null, null);
}
/**
 * Sets the broker used to evaluate select expressions.
 *
 * @param broker the broker to use for subsequent parses
 */
public void setBroker(DBBroker broker) {
    this.broker = broker;
}
/**
 * Sets the documents that parsed modifications will apply to.
 *
 * @param docs the target document set for subsequent parses
 */
public void setDocumentSet(DocumentSet docs) {
    this.documentSet = docs;
}
/**
 * Parse the input source into a set of modifications.
 *
 * Configures a namespace-aware, non-validating SAX parser, registers this
 * object as both content and lexical handler, and parses the XUpdate
 * document; completed modifications accumulate via the SAX callbacks.
 *
 * NOTE(review): the parser is not configured to disable DTDs/external
 * entities, so untrusted input could trigger XXE — confirm callers only
 * feed trusted XUpdate documents.
 *
 * @param is the XUpdate document to parse
 * @return an array of type Modification
 * @throws ParserConfigurationException if the SAX parser cannot be created
 * @throws IOException on read failure
 * @throws SAXException on XML errors or invalid XUpdate markup
 */
public Modification[] parse(InputSource is)
    throws ParserConfigurationException, IOException, SAXException {
    SAXParserFactory saxFactory = SAXParserFactory.newInstance();
    saxFactory.setNamespaceAware(true);
    saxFactory.setValidating(false);
    SAXParser sax = saxFactory.newSAXParser();
    XMLReader reader = sax.getXMLReader();
    // The lexical handler is needed to receive comment events.
    reader.setProperty(
        "http://xml.org/sax/properties/lexical-handler",
        this);
    reader.setContentHandler(this);
    reader.parse(is);
    Modification mods[] = new Modification[modifications.size()];
    return (Modification[]) modifications.toArray(mods);
}
/**
 * No-op: locator information is not used by this processor.
 *
 * @see org.xml.sax.ContentHandler#setDocumentLocator(org.xml.sax.Locator)
 */
public void setDocumentLocator(Locator locator) {
}
/**
 * No-op: no per-document state is reset here.
 *
 * @see org.xml.sax.ContentHandler#startDocument()
 */
public void startDocument() throws SAXException {
}
/**
 * No-op: results are collected incrementally by the element callbacks.
 *
 * @see org.xml.sax.ContentHandler#endDocument()
 */
public void endDocument() throws SAXException {
}
/**
 * Records a prefix → URI binding used to resolve qnames in xupdate
 * element/attribute constructors and select expressions.
 *
 * NOTE(review): bindings live in a flat map, so re-declaring a prefix in a
 * nested scope overwrites (and endPrefixMapping later removes) the outer
 * binding — confirm nested prefix redefinition is not required.
 *
 * @see org.xml.sax.ContentHandler#startPrefixMapping(java.lang.String, java.lang.String)
 */
public void startPrefixMapping(String prefix, String uri)
    throws SAXException {
    namespaces.put(prefix, uri);
}
/**
 * Drops the binding for a prefix whose scope has ended (see the note on
 * nested redefinition at {@code startPrefixMapping}).
 *
 * @see org.xml.sax.ContentHandler#endPrefixMapping(java.lang.String)
 */
public void endPrefixMapping(String prefix) throws SAXException {
    namespaces.remove(prefix);
}
/**
 * SAX callback dispatching XUpdate markup: the modifications root, variable
 * declarations, xupdate:if conditionals, modification commands
 * (append/insert-*/remove/rename/update), node constructors
 * (element/attribute/value-of), and — inside a modification — literal
 * result-fragment elements.
 *
 * @see org.xml.sax.ContentHandler#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)
 */
public void startElement(
    String namespaceURI,
    String localName,
    String qName,
    Attributes atts)
    throws SAXException {
    // save accumulated character content
    if (inModification && charBuf.length() > 0) {
        final String normalized =
            charBuf.getNormalizedString(FastStringBuffer.SUPPRESS_BOTH);
        if (normalized.length() > 0) {
            // Flush buffered text into the current fragment position.
            Text text = doc.createTextNode(normalized);
            if (stack.isEmpty()) {
                //LOG.debug("appending text to fragment: " + text.getData());
                contents.add(text);
            } else {
                Element last = (Element) stack.peek();
                last.appendChild(text);
            }
        }
        charBuf.setLength(0);
    }
    if (namespaceURI.equals(XUPDATE_NS)) {
        if (localName.equals("modifications")) {
            startModifications(atts);
            return;
        }
        // variable declaration
        if (localName.equals("variable")) {
            startVariableDecl(atts);
            return;
        }
        String select = null;
        if ("if".equals(localName)) {
            if (inModification)
                throw new SAXException("xupdate:if is not allowed inside a modification");
            select = atts.getValue("test");
            // Open a conditional scope; modifications parsed inside it are
            // attached to it when they close (see endElement).
            Conditional cond = new Conditional(broker, documentSet, select, namespaces);
            conditionals.push(cond);
            return;
        } else if ("append".equals(localName)
            || "insert-before".equals(localName)
            || "insert-after".equals(localName)
            || "remove".equals(localName)
            || "rename".equals(localName)
            || "update".equals(localName)) {
            if (inModification)
                throw new SAXException("nested modifications are not allowed");
            select = atts.getValue("select");
            if (select == null)
                throw new SAXException(
                    localName + " requires a select attribute");
            // Fresh fragment document and content list for this modification.
            doc = builder.newDocument();
            contents = new NodeListImpl();
            inModification = true;
        } else if (
            ("element".equals(localName)
                || "attribute".equals(localName)
                || "text".equals(localName)
                || "processing-instruction".equals(localName)
                || "comment".equals(localName))
                && (!inModification))
            throw new SAXException(
                "creation elements are only allowed inside "
                    + "a modification");
        // start a new modification section
        if ("append".equals(localName)) {
            String child = atts.getValue("child");
            modification = new Append(broker, documentSet, select, child, namespaces);
        } else if ("update".equals(localName))
            modification = new Update(broker, documentSet, select, namespaces);
        else if ("insert-before".equals(localName))
            modification =
                new Insert(broker, documentSet, select, Insert.INSERT_BEFORE, namespaces);
        else if ("insert-after".equals(localName))
            modification =
                new Insert(broker, documentSet, select, Insert.INSERT_AFTER, namespaces);
        else if ("remove".equals(localName))
            modification = new Remove(broker, documentSet, select, namespaces);
        else if ("rename".equals(localName))
            modification = new Rename(broker, documentSet, select, namespaces);
        // process commands for node creation
        else if ("element".equals(localName)) {
            String name = atts.getValue("name");
            if (name == null)
                throw new SAXException("element requires a name attribute");
            // Resolve an optional prefix against the in-scope namespace map.
            int p = name.indexOf(':');
            String namespace = "";
            String prefix = "";
            if (p > -1) {
                prefix = name.substring(0, p);
                if (name.length() == p + 1)
                    throw new SAXException(
                        "illegal prefix in qname: " + name);
                name = name.substring(p + 1);
                namespace = (String) namespaces.get(prefix);
                if (namespace == null) {
                    throw new SAXException(
                        "no namespace defined for prefix " + prefix);
                }
            }
            Element elem = doc.createElementNS(namespace, name);
            elem.setPrefix(prefix);
            if (stack.isEmpty()) {
                contents.add(elem);
            } else {
                Element last = (Element) stack.peek();
                last.appendChild(elem);
            }
            // The new element becomes the parent for nested constructors.
            stack.push(elem);
        } else if ("attribute".equals(localName)) {
            String name = atts.getValue("name");
            if (name == null)
                throw new SAXException("attribute requires a name attribute");
            int p = name.indexOf(':');
            String namespace = "";
            if (p > -1) {
                String prefix = name.substring(0, p);
                if (name.length() == p + 1)
                    throw new SAXException(
                        "illegal prefix in qname: " + name);
                name = name.substring(p + 1);
                namespace = (String) namespaces.get(prefix);
                if (namespace == null)
                    throw new SAXException(
                        "no namespace defined for prefix " + prefix);
            }
            Attr attrib = doc.createAttributeNS(namespace, name);
            if (stack.isEmpty()) {
                // Top-level attribute: reject duplicates already collected.
                for(int i = 0; i < contents.getLength(); i++) {
                    Node n = contents.item(i);
                    String ns = n.getNamespaceURI();
                    if(ns == null) ns = "";
                    if(n.getNodeType() == Node.ATTRIBUTE_NODE &&
                        n.getLocalName().equals(name) &&
                        ns.equals(namespace))
                        throw new SAXException("The attribute " + attrib.getNodeName() + " cannot be specified twice");
                }
                contents.add(attrib);
            } else {
                Element last = (Element) stack.peek();
                if(last.hasAttributeNS(namespace, name))
                    throw new SAXException("The attribute " + attrib.getNodeName() + " cannot be specified " +
                        "twice on the same element");
                last.setAttributeNode(attrib);
            }
            // Subsequent characters() calls append to this attribute's value.
            inAttribute = true;
            currentNode = attrib;
            // process value-of
        } else if ("value-of".equals(localName)) {
            select = atts.getValue("select");
            if (select == null)
                throw new SAXException("value-of requires a select attribute");
            Sequence seq = processQuery(select);
            LOG.debug("Found " + seq.getLength() + " items for value-of");
            Item item;
            // Nodes are copied into the fragment; atomic values become text.
            for (SequenceIterator i = seq.iterate(); i.hasNext();) {
                item = i.nextItem();
                if(Type.subTypeOf(item.getType(), Type.NODE)) {
                    Node node = XMLUtil.copyNode(doc, ((NodeValue)item).getNode());
                    if (stack.isEmpty())
                        contents.add(node);
                    else {
                        Element last = (Element) stack.peek();
                        last.appendChild(node);
                    }
                } else {
                    try {
                        String value = item.getStringValue();
                        characters(value.toCharArray(), 0, value.length());
                    } catch(XPathException e) {
                        throw new SAXException(e.getMessage(), e);
                    }
                }
            }
        }
    } else if (inModification) {
        // Literal result element inside a modification: copy it (and its
        // attributes) verbatim into the fragment.
        Element elem = doc.createElementNS(namespaceURI, qName);
        Attr a;
        for (int i = 0; i < atts.getLength(); i++) {
            a = doc.createAttributeNS(atts.getURI(i), atts.getQName(i));
            a.setValue(atts.getValue(i));
            elem.setAttributeNodeNS(a);
        }
        if (stack.isEmpty()) {
            contents.add(elem);
        } else {
            Element last = (Element) stack.peek();
            last.appendChild(elem);
        }
        stack.push(elem);
    }
}
/**
 * Handles an xupdate:variable element: requires {@code select} and
 * {@code name} attributes and binds the evaluated select expression to
 * the given variable name.
 *
 * @param atts attributes of the variable element
 * @throws SAXException if either required attribute is missing
 */
private void startVariableDecl(Attributes atts) throws SAXException {
    String select = atts.getValue("select");
    if (select == null) {
        throw new SAXException("variable declaration requires a select attribute");
    }
    String name = atts.getValue("name");
    if (name == null) {
        throw new SAXException("variable declarations requires a name attribute");
    }
    createVariable(name, select);
}
/**
 * Validates the xupdate:modifications root element: a {@code version}
 * attribute must be present and equal to "1.0".
 *
 * @param atts attributes of the modifications element
 * @throws SAXException if the version attribute is missing or unsupported
 */
private void startModifications(Attributes atts) throws SAXException {
    String declared = atts.getValue("version");
    if (declared == null) {
        throw new SAXException(
            "version attribute is required for element modifications");
    }
    if (!"1.0".equals(declared)) {
        throw new SAXException(
            "Version " + declared + " of XUpdate not supported.");
    }
}
/**
 * SAX callback: flushes pending character content into the fragment, then
 * closes the current XUpdate construct — finishing conditionals, popping
 * constructed elements, ending attribute constructors, or finalizing a
 * modification command.
 *
 * @see org.xml.sax.ContentHandler#endElement(java.lang.String, java.lang.String, java.lang.String)
 */
public void endElement(String namespaceURI, String localName, String qName)
    throws SAXException {
    // Flush buffered text before the enclosing construct closes.
    if (inModification && charBuf.length() > 0) {
        final String normalized =
            charBuf.getNormalizedString(FastStringBuffer.SUPPRESS_BOTH);
        if (normalized.length() > 0) {
            Text text = doc.createTextNode(normalized);
            if (stack.isEmpty()) {
                contents.add(text);
            } else {
                Element last = (Element) stack.peek();
                last.appendChild(text);
            }
        }
        charBuf.setLength(0);
    }
    if (XUPDATE_NS.equals(namespaceURI)) {
        if ("if".equals(localName)) {
            // A closed conditional is stored alongside the top-level
            // modifications, in document order.
            Conditional cond = (Conditional) conditionals.pop();
            modifications.add(cond);
        } else if (localName.equals("element")) {
            stack.pop();
        } else if (localName.equals("attribute")) {
            inAttribute = false;
        } else if (localName.equals("append")
            || localName.equals("update")
            || localName.equals("remove")
            || localName.equals("rename")
            || localName.equals("insert-before")
            || localName.equals("insert-after")) {
            // Modification command complete: attach its constructed content,
            // then file it under the innermost open conditional, if any.
            inModification = false;
            modification.setContent(contents);
            if(!conditionals.isEmpty()) {
                Conditional cond = (Conditional) conditionals.peek();
                cond.addModification(modification);
            } else {
                modifications.add(modification);
            }
            modification = null;
        }
    } else if (inModification)
        // Closing a literal result element inside a modification.
        stack.pop();
}
/**
* @see org.xml.sax.ContentHandler#characters(char, int, int)
*/
public void characters(char[] ch, int start, int length)
throws SAXException {
if (inModification) {
if (inAttribute) {
Attr attr = (Attr)currentNode;
String val = attr.getValue();
if(val == null)
val = new String(ch, start, length);
else
val += new String(ch, start, length);
attr.setValue(val);
} else {
charBuf.append(ch, start, length);
}
}
}
    /**
     * Ignorable whitespace is deliberately discarded: only significant
     * character data (see {@link #characters(char[], int, int)}) contributes
     * to the content of a modification.
     *
     * @see org.xml.sax.ContentHandler#ignorableWhitespace(char, int, int)
     */
    public void ignorableWhitespace(char[] ch, int start, int length)
        throws SAXException {
    }
/**
* @see org.xml.sax.ContentHandler#processingInstruction(java.lang.String, java.lang.String)
*/
public void processingInstruction(String target, String data)
throws SAXException {
if (inModification && charBuf.length() > 0) {
final String normalized =
charBuf.getNormalizedString(FastStringBuffer.SUPPRESS_BOTH);
if (normalized.length() > 0) {
Text text = doc.createTextNode(normalized);
if (stack.isEmpty()) {
LOG.debug("appending text to fragment: " + text.getData());
contents.add(text);
} else {
Element last = (Element) stack.peek();
last.appendChild(text);
}
}
charBuf.setLength(0);
}
if (inModification) {
ProcessingInstruction pi =
doc.createProcessingInstruction(target, data);
if (stack.isEmpty()) {
contents.add(pi);
} else {
Element last = (Element) stack.peek();
last.appendChild(pi);
}
}
}
    /**
     * Skipped entities carry no content that this handler needs; they are
     * intentionally ignored.
     *
     * @see org.xml.sax.ContentHandler#skippedEntity(java.lang.String)
     */
    public void skippedEntity(String name) throws SAXException {
    }
private void createVariable(String name, String select)
throws SAXException {
LOG.debug("creating variable " + name + " as " + select);
Sequence result = processQuery(select);
LOG.debug("found " + result.getLength() + " for variable " + name);
variables.put(name, result);
}
private Sequence processQuery(String select) throws SAXException {
try {
XQueryContext context = new XQueryContext(broker);
context.setStaticallyKnownDocuments(documentSet);
Map.Entry entry;
for (Iterator i = namespaces.entrySet().iterator(); i.hasNext();) {
entry = (Map.Entry) i.next();
context.declareNamespace(
(String) entry.getKey(),
(String) entry.getValue());
}
for (Iterator i = variables.entrySet().iterator(); i.hasNext(); ) {
entry = (Map.Entry) i.next();
context.declareVariable(entry.getKey().toString(), entry.getValue());
}
XQueryLexer lexer = new XQueryLexer(context, new StringReader(select));
XQueryParser parser = new XQueryParser(lexer);
XQueryTreeParser treeParser = new XQueryTreeParser(context);
parser.xpath();
if (parser.foundErrors()) {
throw new SAXException(parser.getErrorMessage());
}
AST ast = parser.getAST();
LOG.debug("generated AST: " + ast.toStringTree());
PathExpr expr = new PathExpr(context);
treeParser.xpath(ast, expr);
if (treeParser.foundErrors()) {
throw new SAXException(treeParser.getErrorMessage());
}
Sequence seq = expr.eval(null, null);
return seq;
} catch (RecognitionException e) {
LOG.warn("error while creating variable", e);
throw new SAXException(e);
} catch (TokenStreamException e) {
LOG.warn("error while creating variable", e);
throw new SAXException(e);
} catch (XPathException e) {
throw new SAXException(e);
}
}
/* (non-Javadoc)
* @see org.xml.sax.ext.LexicalHandler#comment(char[], int, int)
*/
public void comment(char[] ch, int start, int length) throws SAXException {
if (inModification && charBuf.length() > 0) {
final String normalized =
charBuf.getNormalizedString(FastStringBuffer.SUPPRESS_BOTH);
if (normalized.length() > 0) {
Text text = doc.createTextNode(normalized);
if (stack.isEmpty()) {
//LOG.debug("appending text to fragment: " + text.getData());
contents.add(text);
} else {
Element last = (Element) stack.peek();
last.appendChild(text);
}
}
charBuf.setLength(0);
}
if (inModification) {
Comment comment = doc.createComment(new String(ch, start, length));
if (stack.isEmpty()) {
contents.add(comment);
} else {
Element last = (Element) stack.peek();
last.appendChild(comment);
}
}
}
    /**
     * CDATA section boundaries need no special handling; the section's
     * character content arrives via {@link #characters(char[], int, int)}.
     *
     * @see org.xml.sax.ext.LexicalHandler#endCDATA()
     */
    public void endCDATA() throws SAXException {
    }
    /**
     * DTD declarations are irrelevant to XUpdate processing and are ignored.
     *
     * @see org.xml.sax.ext.LexicalHandler#endDTD()
     */
    public void endDTD() throws SAXException {
    }
    /**
     * Entity boundaries are ignored; expanded entity content arrives through
     * the regular ContentHandler callbacks.
     *
     * @see org.xml.sax.ext.LexicalHandler#endEntity(java.lang.String)
     */
    public void endEntity(String name) throws SAXException {
    }
    /**
     * See {@link #endCDATA()}: CDATA boundaries require no action here.
     *
     * @see org.xml.sax.ext.LexicalHandler#startCDATA()
     */
    public void startCDATA() throws SAXException {
    }
    /**
     * DTD declarations are irrelevant to XUpdate processing and are ignored.
     *
     * @see org.xml.sax.ext.LexicalHandler#startDTD(java.lang.String, java.lang.String, java.lang.String)
     */
    public void startDTD(String name, String publicId, String systemId)
        throws SAXException {
    }
    /**
     * See {@link #endEntity(String)}: entity boundaries require no action.
     *
     * @see org.xml.sax.ext.LexicalHandler#startEntity(java.lang.String)
     */
    public void startEntity(String name) throws SAXException {
    }
public void reset() {
inModification = false;
inAttribute = false;
modification = null;
doc = null;
contents = null;
stack.clear();
currentNode = null;
broker = null;
documentSet = null;
modifications.clear();
charBuf.setLength(0);
variables.clear();
namespaces.clear();
namespaces.put("xml", "http:
}
}
|
package org.htmlcleaner;
import java.util.HashMap;
/**
 * The default {@link ITagInfoProvider}: a map from (lower-case) tag name to
 * the {@link TagInfo} rules HtmlCleaner uses to balance malformed HTML.
 * <p>
 * The original implementation repeated the same two rule strings dozens of
 * times; they are factored into constants here, and the registration of the
 * common "block tag" pattern into a helper. Two accidental duplicates in the
 * original rule lists ("p,p,..." for the p tag, a repeated "tr" in table's
 * allowed children) have been removed; the effective rule sets are unchanged.
 */
public class DefaultTagProvider extends HashMap implements ITagInfoProvider {

    /**
     * Inline "phrase" tags: when one of these is open and a block-level tag
     * starts, the phrase tag is closed before the block and copied inside it.
     */
    private static final String CLOSE_BEFORE_COPY_INSIDE =
        "a,bdo,strong,em,q,b,i,u,tt,sub,sup,big,small,strike,s,font";

    /**
     * Tags implicitly closed when one of the common block-level tags opens.
     */
    private static final String CLOSE_BEFORE =
        "p,address,label,abbr,acronym,dfn,kbd,samp,var,cite,code,param,xml";

    // singleton instance, used if no other TagInfoProvider is specified
    private static DefaultTagProvider _instance;

    /**
     * Returns singleton instance of this class.
     */
    public static synchronized DefaultTagProvider getInstance() {
        if (_instance == null) {
            _instance = new DefaultTagProvider();
        }
        return _instance;
    }

    /**
     * Creates a TagInfo, registers it under its name and returns it so the
     * caller can attach further rules.
     */
    private TagInfo addTag(String name, int contentType, int belongsTo,
            boolean deprecated, boolean unique, boolean ignorePermitted) {
        TagInfo tagInfo = new TagInfo(name, contentType, belongsTo, deprecated, unique, ignorePermitted);
        put(name, tagInfo);
        return tagInfo;
    }

    /**
     * Registers a body-level block tag carrying the standard close-before
     * rules shared by div, blockquote, ul, hr and many others.
     */
    private TagInfo addBlockTag(String name, int contentType, boolean deprecated) {
        TagInfo tagInfo = addTag(name, contentType, TagInfo.BODY, deprecated, false, false);
        tagInfo.defineCloseBeforeCopyInsideTags(CLOSE_BEFORE_COPY_INSIDE);
        tagInfo.defineCloseBeforeTags(CLOSE_BEFORE);
        return tagInfo;
    }

    /**
     * Builds the default rule set for all known HTML tags.
     */
    public DefaultTagProvider() {
        TagInfo tagInfo;

        addBlockTag("div", TagInfo.CONTENT_ALL, false);
        addTag("span", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);

        // head-only tags; title must additionally be unique
        addTag("meta", TagInfo.CONTENT_NONE, TagInfo.HEAD, false, false, false);
        addTag("link", TagInfo.CONTENT_NONE, TagInfo.HEAD, false, false, false);
        addTag("title", TagInfo.CONTENT_TEXT, TagInfo.HEAD, false, true, false);
        addTag("style", TagInfo.CONTENT_TEXT, TagInfo.HEAD, false, false, false);
        addTag("bgsound", TagInfo.CONTENT_NONE, TagInfo.HEAD, false, false, false);

        // headings additionally close any other open heading
        String[] headings = {"h1", "h2", "h3", "h4", "h5", "h6"};
        for (int i = 0; i < headings.length; i++) {
            tagInfo = addTag(headings[i], TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
            tagInfo.defineCloseBeforeCopyInsideTags(CLOSE_BEFORE_COPY_INSIDE);
            tagInfo.defineCloseBeforeTags("h1,h2,h3,h4,h5,h6," + CLOSE_BEFORE);
        }

        // CLOSE_BEFORE already contains "p" (the original listed it twice)
        addBlockTag("p", TagInfo.CONTENT_ALL, false);
        addTag("strong", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("em", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("abbr", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("acronym", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addBlockTag("address", TagInfo.CONTENT_ALL, false);
        addTag("bdo", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addBlockTag("blockquote", TagInfo.CONTENT_ALL, false);
        addTag("cite", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("q", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("code", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("ins", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("del", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("dfn", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("kbd", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addBlockTag("pre", TagInfo.CONTENT_ALL, false);
        addTag("samp", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addBlockTag("listing", TagInfo.CONTENT_ALL, false);
        addTag("var", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("br", TagInfo.CONTENT_NONE, TagInfo.BODY, false, false, false);
        addTag("wbr", TagInfo.CONTENT_NONE, TagInfo.BODY, false, false, false);
        tagInfo = addTag("nobr", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeTags("nobr");
        addTag("xmp", TagInfo.CONTENT_TEXT, TagInfo.BODY, false, false, false);

        // links, images, image maps
        tagInfo = addTag("a", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeTags("a");
        addTag("base", TagInfo.CONTENT_NONE, TagInfo.HEAD, false, false, false);
        addTag("img", TagInfo.CONTENT_NONE, TagInfo.BODY, false, false, false);
        tagInfo = addTag("area", TagInfo.CONTENT_NONE, TagInfo.BODY, false, false, false);
        tagInfo.defineFatalTags("map");
        tagInfo.defineCloseBeforeTags("area");
        tagInfo = addTag("map", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeTags("map");
        addTag("object", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addBlockTag("param", TagInfo.CONTENT_NONE, false);
        addTag("applet", TagInfo.CONTENT_ALL, TagInfo.BODY, true, false, false);
        addTag("xml", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);

        // lists
        addBlockTag("ul", TagInfo.CONTENT_ALL, false);
        addBlockTag("ol", TagInfo.CONTENT_ALL, false);
        tagInfo = addTag("li", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeCopyInsideTags(CLOSE_BEFORE_COPY_INSIDE);
        tagInfo.defineCloseBeforeTags("li," + CLOSE_BEFORE);
        addBlockTag("dl", TagInfo.CONTENT_ALL, false);
        tagInfo = addTag("dt", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeTags("dt,dd");
        tagInfo = addTag("dd", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeTags("dt,dd");
        addBlockTag("menu", TagInfo.CONTENT_ALL, true);
        addBlockTag("dir", TagInfo.CONTENT_ALL, true);

        // tables (the original allowed-children list repeated "tr")
        tagInfo = addTag("table", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineAllowedChildrenTags("tr,tbody,thead,tfoot,colgroup,col,caption");
        tagInfo.defineCloseBeforeCopyInsideTags(CLOSE_BEFORE_COPY_INSIDE);
        tagInfo.defineCloseBeforeTags("tr,thead,tbody,tfoot,caption,colgroup,table," + CLOSE_BEFORE);
        tagInfo = addTag("tr", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineFatalTags("table");
        tagInfo.defineRequiredEnclosingTags("tbody");
        tagInfo.defineAllowedChildrenTags("td,th");
        tagInfo.defineHigherLevelTags("thead,tfoot");
        tagInfo.defineCloseBeforeTags("tr,td,th,caption,colgroup");
        String[] cells = {"td", "th"};
        for (int i = 0; i < cells.length; i++) {
            tagInfo = addTag(cells[i], TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
            tagInfo.defineFatalTags("table");
            tagInfo.defineRequiredEnclosingTags("tr");
            tagInfo.defineCloseBeforeTags("td,th,caption,colgroup");
        }
        String[] rowGroups = {"tbody", "thead", "tfoot"};
        for (int i = 0; i < rowGroups.length; i++) {
            tagInfo = addTag(rowGroups[i], TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
            tagInfo.defineFatalTags("table");
            tagInfo.defineAllowedChildrenTags("tr,form");
            tagInfo.defineCloseBeforeTags("td,th,tr,tbody,thead,tfoot,caption,colgroup");
        }
        tagInfo = addTag("col", TagInfo.CONTENT_NONE, TagInfo.BODY, false, false, false);
        tagInfo.defineFatalTags("table");
        tagInfo = addTag("colgroup", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineFatalTags("table");
        tagInfo.defineAllowedChildrenTags("col");
        tagInfo.defineCloseBeforeTags("td,th,tr,tbody,thead,tfoot,caption,colgroup");
        tagInfo = addTag("caption", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineFatalTags("table");
        tagInfo.defineCloseBeforeTags("td,th,tr,tbody,thead,tfoot,caption,colgroup");

        // forms
        tagInfo = addTag("form", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, true);
        tagInfo.defineForbiddenTags("form");
        tagInfo.defineCloseBeforeCopyInsideTags(CLOSE_BEFORE_COPY_INSIDE);
        tagInfo.defineCloseBeforeTags("option,optgroup,textarea,select,fieldset," + CLOSE_BEFORE);
        tagInfo = addTag("input", TagInfo.CONTENT_NONE, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeTags("select,optgroup,option");
        tagInfo = addTag("textarea", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeTags("select,optgroup,option");
        tagInfo = addTag("select", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, true);
        tagInfo.defineAllowedChildrenTags("option,optgroup");
        tagInfo.defineCloseBeforeTags("option,optgroup,select");
        tagInfo = addTag("option", TagInfo.CONTENT_TEXT, TagInfo.BODY, false, false, true);
        tagInfo.defineFatalTags("select");
        tagInfo.defineCloseBeforeTags("option");
        tagInfo = addTag("optgroup", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, true);
        tagInfo.defineFatalTags("select");
        tagInfo.defineAllowedChildrenTags("option");
        tagInfo.defineCloseBeforeTags("optgroup");
        tagInfo = addTag("button", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseBeforeTags("select,optgroup,option");
        addTag("label", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addBlockTag("fieldset", TagInfo.CONTENT_ALL, false);
        addBlockTag("isindex", TagInfo.CONTENT_NONE, true);

        addTag("script", TagInfo.CONTENT_ALL, TagInfo.HEAD_AND_BODY, false, false, false);
        addTag("noscript", TagInfo.CONTENT_ALL, TagInfo.HEAD_AND_BODY, false, false, false);

        // text formatting: each closes inside and re-opens after the others
        tagInfo = addTag("b", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("u,i,tt,sub,sup,big,small,strike,blink,s");
        tagInfo = addTag("i", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,tt,sub,sup,big,small,strike,blink,s");
        tagInfo = addTag("u", TagInfo.CONTENT_ALL, TagInfo.BODY, true, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,i,tt,sub,sup,big,small,strike,blink,s");
        tagInfo = addTag("tt", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,i,sub,sup,big,small,strike,blink,s");
        tagInfo = addTag("sub", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,i,tt,sup,big,small,strike,blink,s");
        tagInfo = addTag("sup", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,i,tt,sub,big,small,strike,blink,s");
        tagInfo = addTag("big", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,i,tt,sub,sup,small,strike,blink,s");
        tagInfo = addTag("small", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,i,tt,sub,sup,big,strike,blink,s");
        tagInfo = addTag("strike", TagInfo.CONTENT_ALL, TagInfo.BODY, true, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,i,tt,sub,sup,big,small,blink,s");
        tagInfo = addTag("blink", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,i,tt,sub,sup,big,small,strike,s");
        addBlockTag("marquee", TagInfo.CONTENT_ALL, false);
        tagInfo = addTag("s", TagInfo.CONTENT_ALL, TagInfo.BODY, true, false, false);
        tagInfo.defineCloseInsideCopyAfterTags("b,u,i,tt,sub,sup,big,small,strike,blink");

        addBlockTag("hr", TagInfo.CONTENT_NONE, false);
        addTag("font", TagInfo.CONTENT_ALL, TagInfo.BODY, true, false, false);
        addTag("basefont", TagInfo.CONTENT_NONE, TagInfo.BODY, true, false, false);
        addBlockTag("center", TagInfo.CONTENT_ALL, true);
        addTag("comment", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("server", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addTag("iframe", TagInfo.CONTENT_ALL, TagInfo.BODY, false, false, false);
        addBlockTag("embed", TagInfo.CONTENT_NONE, false);
    }

    /**
     * Returns the TagInfo registered for the given tag name, or null if the
     * tag is unknown.
     */
    public TagInfo getTagInfo(String tagName) {
        return (TagInfo) get(tagName);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.